/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-12-22 16:28:47 UTC
  • mto: This revision was merged to the branch mainline in revision 4922.
  • Revision ID: john@arbash-meinel.com-20091222162847-tvnsc69to4l4uf5r
Implement a permute_for_extension helper.

Use it for all of the 'simple' extension permutations.
It basically permutes all tests in the current module, by setting TestCase.module.
Which works well for most of our extension tests. Some had more advanced
handling of permutations (extra permutations, custom vars, etc.)

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 
2
#
 
3
# This program is free software; you can redistribute it and/or modify
 
4
# it under the terms of the GNU General Public License as published by
 
5
# the Free Software Foundation; either version 2 of the License, or
 
6
# (at your option) any later version.
 
7
#
 
8
# This program is distributed in the hope that it will be useful,
 
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
11
# GNU General Public License for more details.
 
12
#
 
13
# You should have received a copy of the GNU General Public License
 
14
# along with this program; if not, write to the Free Software
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
16
 
 
17
"""Tests for the Repository facility that are not interface tests.
 
18
 
 
19
For interface tests see tests/per_repository/*.py.
 
20
 
 
21
For concrete class tests see this file, and for storage formats tests
 
22
also see this file.
 
23
"""
 
24
 
 
25
from stat import S_ISDIR
 
26
from StringIO import StringIO
 
27
import sys
 
28
 
 
29
import bzrlib
 
30
from bzrlib.errors import (NotBranchError,
 
31
                           NoSuchFile,
 
32
                           UnknownFormatError,
 
33
                           UnsupportedFormatError,
 
34
                           )
 
35
from bzrlib import (
 
36
    graph,
 
37
    tests,
 
38
    )
 
39
from bzrlib.branchbuilder import BranchBuilder
 
40
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
41
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
42
from bzrlib.repository import RepositoryFormat
 
43
from bzrlib.smart import server
 
44
from bzrlib.tests import (
 
45
    TestCase,
 
46
    TestCaseWithTransport,
 
47
    TestSkipped,
 
48
    test_knit,
 
49
    )
 
50
from bzrlib.transport import (
 
51
    fakenfs,
 
52
    get_transport,
 
53
    )
 
54
from bzrlib.transport.memory import MemoryServer
 
55
from bzrlib import (
 
56
    bencode,
 
57
    bzrdir,
 
58
    errors,
 
59
    inventory,
 
60
    osutils,
 
61
    progress,
 
62
    repository,
 
63
    revision as _mod_revision,
 
64
    symbol_versioning,
 
65
    upgrade,
 
66
    workingtree,
 
67
    )
 
68
from bzrlib.repofmt import (
 
69
    groupcompress_repo,
 
70
    knitrepo,
 
71
    pack_repo,
 
72
    weaverepo,
 
73
    )
 
74
 
 
75
 
 
76
class TestDefaultFormat(TestCase):
    """Tests for getting and setting the default repository format."""

    def test_get_set_default_format(self):
        # Capture the current default so it can be restored afterwards.
        old_default = bzrdir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.RepositoryFormat.get_default_format()
        self.assertIsInstance(old_format, private_default)

        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
            return my_bzrdir

        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
        bzrdir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        try:
            # the default branch format is used by the meta dir format
            # which is not the default bzrdir format at this point
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
            result = dir.create_repository()
            self.assertEqual(result, 'A bzr repository dir')
        finally:
            # Restore the registry regardless of the outcome above.
            bzrdir.format_registry.remove('default')
            bzrdir.format_registry.remove('sample')
            bzrdir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
                              old_format.__class__)
 
103
 
 
104
 
 
105
class SampleRepositoryFormat(repository.RepositoryFormat):
    """A sample format

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.
    """

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_bzrdir, shared=False):
        """Initialize a repository in a BzrDir"""
        # Write the format marker so find_format can see this repository.
        t = a_bzrdir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        # Deliberately unsupported, so tests can exercise the
        # open(unsupported=...) code paths.
        return False

    def open(self, a_bzrdir, _found=False):
        return "opened repository."
 
127
 
 
128
 
 
129
class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = get_transport(url)
            found_format = repository.RepositoryFormat.find_format(dir)
            # assertIsInstance replaces the deprecated unittest alias
            # failUnless(isinstance(...)) and gives a clearer failure message.
            self.assertIsInstance(found_format, format.__class__)
        check_format(weaverepo.RepositoryFormat7(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          repository.RepositoryFormat.find_format,
                          dir)

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          repository.RepositoryFormat.find_format,
                          dir)

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        # make a control dir
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        # make a repo
        format.initialize(dir)
        # register a format for it.
        repository.RepositoryFormat.register_format(format)
        # which repository.Open will refuse (not supported)
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
        # but open(unsupported) will work
        self.assertEqual(format.open(dir), "opened repository.")
        # unregister the format
        repository.RepositoryFormat.unregister_format(format)
 
172
 
 
173
 
 
174
class TestFormat6(TestCaseWithTransport):
    """Behaviour checks for the weave-based repository format 6."""

    def _make_format6_repo(self):
        # Every test needs the same two-step setup: a format-6 control
        # dir plus a format-6 repository inside it.
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        return control, repo

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control, repo = self._make_format6_repo()
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control, repo = self._make_format6_repo()
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control, repo = self._make_format6_repo()
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_no_ancestry_weave(self):
        control, repo = self._make_format6_repo()
        # We no longer need to create the ancestry.weave file
        # since it is *never* used.
        self.assertRaises(NoSuchFile,
                          control.transport.get,
                          'ancestry.weave')

    def test_supports_external_lookups(self):
        control, repo = self._make_format6_repo()
        self.assertFalse(repo._format.supports_external_lookups)
 
207
 
 
208
 
 
209
class TestFormat7(TestCaseWithTransport):
    """Behaviour checks for the weave-based repository format 7."""

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # The expected layout is:
        # format 'Bazaar-NG Repository format 7'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())
        # Creating a file with id Foo:Bar results in a non-escaped file name on
        # disk.
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Foo:Bar'], ['file'])
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
        try:
            tree.commit('first post', rev_id='first')
        except errors.IllegalPath:
            # The colon in the file-id is not representable on win32.
            if sys.platform != 'win32':
                raise
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
                              ' in repo format 7')
            return
        self.assertEqualDiff(
            '# bzr weave file v5\n'
            'i\n'
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
            'n first\n'
            '\n'
            'w\n'
            '{ 0\n'
            '. content\n'
            '}\n'
            'W\n',
            t.get('weaves/74/Foo%3ABar.weave').read())

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        # The expected layout is:
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        # lock is not present when unlocked
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())
        self.assertFalse(t.has('branch-lock'))

    def test_creates_lockdir(self):
        """Make sure it appears to be controlled by a LockDir existence"""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        # TODO: Should check there is a 'lock' toplevel directory,
        # regardless of contents
        self.assertFalse(t.has('lock/held/info'))
        repo.lock_write()
        try:
            self.assertTrue(t.has('lock/held/info'))
        finally:
            # unlock so we don't get a warning about failing to do so
            repo.unlock()

    def test_uses_lockdir(self):
        """repo format 7 actually locks on lockdir"""
        base_url = self.get_url()
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        repo.lock_write()
        repo.unlock()
        del repo
        # make sure the same lock is created by opening it
        repo = repository.Repository.open(base_url)
        repo.lock_write()
        self.assertTrue(t.has('lock/held/info'))
        repo.unlock()
        self.assertFalse(t.has('lock/held/info'))

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # The expected layout is:
        # format 'Bazaar-NG Repository format 7'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        ## self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        # Toggling trees back on removes the marker file.
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())

    def test_supports_external_lookups(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)
 
360
 
 
361
 
 
362
class TestFormatKnit1(TestCaseWithTransport):
    """Behaviour checks for the knit-based repository format 1."""

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # The expected layout is:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
            '\nfoo fulltext 0 81  :')

    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # The expected layout is:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # The expected layout is:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        # Toggling trees back on removes the marker file.
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory. So we grab the one from the expected text. Which
        is valid when the api is not being abused.
        """
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
                   '</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo.deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)
 
488
 
 
489
 
 
490
class DummyRepository(object):
    """A dummy repository for testing."""

    # Tests overwrite these per-instance to make the dummy look like a
    # particular repository.
    _format = None
    _serializer = None

    def supports_rich_root(self):
        # Without a format we cannot claim rich-root support.
        if self._format is None:
            return False
        return self._format.rich_root_data

    def get_graph(self):
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError
 
506
 
 
507
 
 
508
class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as repositories
    so that none of the default registered inter-repository classes will
    MATCH.
    """

    @staticmethod
    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        # Both ends must be DummyRepository instances for this optimiser
        # to claim the pair.
        if not isinstance(repo_source, DummyRepository):
            return False
        return isinstance(repo_target, DummyRepository)
 
521
 
 
522
 
 
523
class TestInterRepository(TestCaseWithTransport):
    """Tests for InterRepository optimiser selection and registration."""

    def test_get_default_inter_repository(self):
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for a inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        # is handed to them.
        dummy_a = DummyRepository()
        dummy_b = DummyRepository()
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that a optimised code path provider - a
        # InterRepository subclass can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        # check
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        for dummy in (dummy_a, dummy_b):
            dummy._serializer = repo._serializer
            dummy._format.supports_tree_reference = \
                repo._format.supports_tree_reference
            dummy._format.rich_root_data = repo._format.rich_root_data
        repository.InterRepository.register_optimiser(InterDummy)
        try:
            # we should get the default for something InterDummy returns False
            # to
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
            self.assertGetsDefaultInterRepository(dummy_a, repo)
            # and we should get an InterDummy for a pair it 'likes'
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
            self.assertEqual(InterDummy, inter_repo.__class__)
            self.assertEqual(dummy_a, inter_repo.source)
            self.assertEqual(dummy_b, inter_repo.target)
        finally:
            repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
 
582
 
 
583
 
 
584
class TestInterWeaveRepo(TestCaseWithTransport):
    """Tests for InterWeaveRepo compatibility checks and registration."""

    def test_is_compatible_and_registered(self):
        # InterWeaveRepo is compatible when either side
        # is a format 5/6/7 branch
        # NOTE: the redundant method-local
        # "from bzrlib.repofmt import knitrepo, weaverepo" was removed;
        # both modules are already imported at module level.
        formats = [weaverepo.RepositoryFormat5(),
                   weaverepo.RepositoryFormat6(),
                   weaverepo.RepositoryFormat7()]
        incompatible_formats = [weaverepo.RepositoryFormat4(),
                                knitrepo.RepositoryFormatKnit1(),
                                ]
        repo_a = self.make_repository('a')
        repo_b = self.make_repository('b')
        is_compatible = repository.InterWeaveRepo.is_compatible
        for source in incompatible_formats:
            # force incompatible left then right
            repo_a._format = source
            repo_b._format = formats[0]
            self.assertFalse(is_compatible(repo_a, repo_b))
            self.assertFalse(is_compatible(repo_b, repo_a))
        for source in formats:
            repo_a._format = source
            for target in formats:
                repo_b._format = target
                self.assertTrue(is_compatible(repo_a, repo_b))
        self.assertEqual(repository.InterWeaveRepo,
                         repository.InterRepository.get(repo_a,
                                                        repo_b).__class__)
 
613
 
 
614
 
 
615
class TestRepositoryConverter(TestCaseWithTransport):
    """Tests for converting a repository between storage formats."""

    def test_convert_empty(self):
        t = get_transport(self.get_url('.'))
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
        target_format = knitrepo.RepositoryFormatKnit1()
        converter = repository.CopyConverter(target_format)
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            converter.convert(repo, pb)
        finally:
            # Always finish the progress bar, even if conversion fails.
            pb.finished()
        repo = repo_dir.open_repository()
        self.assertIsInstance(target_format, repo._format.__class__)
 
631
 
 
632
 
 
633
class TestMisc(TestCase):
    """Miscellaneous repository-module helper tests."""

    def test_unescape_xml(self):
        """We get some kind of error when malformed entities are passed"""
        # '&bar;' is not a recognised entity, so the lookup raises KeyError.
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
638
 
 
639
 
 
640
class TestRepositoryFormatKnit3(TestCaseWithTransport):
    """Behaviour checks for the rich-root knit repository format 3."""

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        # Start from a knit1 tree, which has no per-root weave.
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        # Upgrade to knit3 and check the root data is now available.
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)
 
690
 
 
691
 
 
692
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests specific to the '2a' (groupcompress + CHK inventory) format."""

    def test_fetch_combines_groups(self):
        """Fetching into a 2a repo recombines texts into shared groups.

        NOTE(review): this test was previously defined three times with
        byte-identical bodies; only the last binding was ever executed, so the
        duplicates were dead code and have been removed.
        """
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        # A committed inventory exposes both CHK maps (id_to_entry and
        # parent_id_basename_to_file_id) with the expected 64k page size.
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        # Pack only the two hinted packs; the first one must survive intact.
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        # 2a -> 2a uses the CHK-aware stream source.
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Drain the substream; its contents are irrelevant here.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        # Only the revisions index treats inconsistent duplicate adds as
        # fatal; the other indices tolerate them.
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
899
 
 
900
 
 
901
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Check which StreamSource class is selected for pack-to-pack fetches.

    The optimised KnitPackStreamSource is only used when source and target
    have exactly the same format; everything else falls back to the generic
    repository.StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        # Deliberately reopen the target over the smart server so its format
        # is the one reported across the wire.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        # Reopen over the smart server, as above.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)
 
962
 
 
963
 
 
964
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check the parent ids found for rev_set, ignoring ordering."""
        found = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(found))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        # 'initial' has no parents, so nothing is reported.
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
 
1021
 
 
1022
 
 
1023
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build a repository with deliberately inconsistent file ancestry.

        Returns a repository containing rev1a (well formed), rev1b (inventory
        but no revision object), rev2 (file1 references the unreferenced
        rev1b), and rev3 (file2 references the ghost rev1c).
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            # Committing the write group happens in the finally block below,
            # after all the broken data has been inserted.
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a',revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups in reverse registration order (commit the write
            # group before unlocking).
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add inv plus a minimal Revision object for revision_id to repo."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id,revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file entry to inv and its text (with parents) to repo."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
1116
 
 
1117
 
 
1118
class TestRepositoryPackCollection(TestCaseWithTransport):
 
1119
 
 
1120
    def get_format(self):
 
1121
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
1122
 
 
1123
    def get_packs(self):
 
1124
        format = self.get_format()
 
1125
        repo = self.make_repository('.', format=format)
 
1126
        return repo._pack_collection
 
1127
 
 
1128
    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        # Three separate commits produce three separate packs (autopack does
        # not trigger until a later commit).
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open a second repository object on the same location so tests can
        # observe the packs independently of the committing tree.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]
 
1145
 
 
1146
    def test__max_pack_count(self):
 
1147
        """The maximum pack count is a function of the number of revisions."""
 
1148
        # no revisions - one pack, so that we can have a revision free repo
 
1149
        # without it blowing up
 
1150
        packs = self.get_packs()
 
1151
        self.assertEqual(1, packs._max_pack_count(0))
 
1152
        # after that the sum of the digits, - check the first 1-9
 
1153
        self.assertEqual(1, packs._max_pack_count(1))
 
1154
        self.assertEqual(2, packs._max_pack_count(2))
 
1155
        self.assertEqual(3, packs._max_pack_count(3))
 
1156
        self.assertEqual(4, packs._max_pack_count(4))
 
1157
        self.assertEqual(5, packs._max_pack_count(5))
 
1158
        self.assertEqual(6, packs._max_pack_count(6))
 
1159
        self.assertEqual(7, packs._max_pack_count(7))
 
1160
        self.assertEqual(8, packs._max_pack_count(8))
 
1161
        self.assertEqual(9, packs._max_pack_count(9))
 
1162
        # check the boundary cases with two digits for the next decade
 
1163
        self.assertEqual(1, packs._max_pack_count(10))
 
1164
        self.assertEqual(2, packs._max_pack_count(11))
 
1165
        self.assertEqual(10, packs._max_pack_count(19))
 
1166
        self.assertEqual(2, packs._max_pack_count(20))
 
1167
        self.assertEqual(3, packs._max_pack_count(21))
 
1168
        # check some arbitrary big numbers
 
1169
        self.assertEqual(25, packs._max_pack_count(112894))
 
1170
 
 
1171
    def test_pack_distribution_zero(self):
 
1172
        packs = self.get_packs()
 
1173
        self.assertEqual([0], packs.pack_distribution(0))
 
1174
 
 
1175
    def test_ensure_loaded_unlocked(self):
 
1176
        packs = self.get_packs()
 
1177
        self.assertRaises(errors.ObjectNotLocked,
 
1178
                          packs.ensure_loaded)
 
1179
 
 
1180
    def test_pack_distribution_one_to_nine(self):
 
1181
        packs = self.get_packs()
 
1182
        self.assertEqual([1],
 
1183
            packs.pack_distribution(1))
 
1184
        self.assertEqual([1, 1],
 
1185
            packs.pack_distribution(2))
 
1186
        self.assertEqual([1, 1, 1],
 
1187
            packs.pack_distribution(3))
 
1188
        self.assertEqual([1, 1, 1, 1],
 
1189
            packs.pack_distribution(4))
 
1190
        self.assertEqual([1, 1, 1, 1, 1],
 
1191
            packs.pack_distribution(5))
 
1192
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
1193
            packs.pack_distribution(6))
 
1194
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
1195
            packs.pack_distribution(7))
 
1196
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
1197
            packs.pack_distribution(8))
 
1198
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
1199
            packs.pack_distribution(9))
 
1200
 
 
1201
    def test_pack_distribution_stable_at_boundaries(self):
 
1202
        """When there are multi-rev packs the counts are stable."""
 
1203
        packs = self.get_packs()
 
1204
        # in 10s:
 
1205
        self.assertEqual([10], packs.pack_distribution(10))
 
1206
        self.assertEqual([10, 1], packs.pack_distribution(11))
 
1207
        self.assertEqual([10, 10], packs.pack_distribution(20))
 
1208
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
 
1209
        # 100s
 
1210
        self.assertEqual([100], packs.pack_distribution(100))
 
1211
        self.assertEqual([100, 1], packs.pack_distribution(101))
 
1212
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
 
1213
        self.assertEqual([100, 100], packs.pack_distribution(200))
 
1214
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
 
1215
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
1216
 
 
1217
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
 
1218
        packs = self.get_packs()
 
1219
        existing_packs = [(2000, "big"), (9, "medium")]
 
1220
        # rev count - 2009 -> 2x1000 + 9x1
 
1221
        pack_operations = packs.plan_autopack_combinations(
 
1222
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
 
1223
        self.assertEqual([], pack_operations)
 
1224
 
 
1225
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
 
1226
        packs = self.get_packs()
 
1227
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
 
1228
        # rev count - 2010 -> 2x1000 + 1x10
 
1229
        pack_operations = packs.plan_autopack_combinations(
 
1230
            existing_packs, [1000, 1000, 10])
 
1231
        self.assertEqual([], pack_operations)
 
1232
 
 
1233
    def test_plan_pack_operations_2010_combines_smallest_two(self):
 
1234
        packs = self.get_packs()
 
1235
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
 
1236
            (1, "single1")]
 
1237
        # rev count - 2010 -> 2x1000 + 1x10 (3)
 
1238
        pack_operations = packs.plan_autopack_combinations(
 
1239
            existing_packs, [1000, 1000, 10])
 
1240
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1241
 
 
1242
    def test_plan_pack_operations_creates_a_single_op(self):
 
1243
        packs = self.get_packs()
 
1244
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
1245
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
1246
        # rev count 150 -> 1x100 and 5x10
 
1247
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
1248
        # be combined into a single 120 size pack, and the 6 & 4 would
 
1249
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
1250
        # we save a pack file with no increased I/O by putting them into the
 
1251
        # same file.
 
1252
        distribution = packs.pack_distribution(150)
 
1253
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
1254
                                                           distribution)
 
1255
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
1256
 
 
1257
    def test_all_packs_none(self):
 
1258
        format = self.get_format()
 
1259
        tree = self.make_branch_and_tree('.', format=format)
 
1260
        tree.lock_read()
 
1261
        self.addCleanup(tree.unlock)
 
1262
        packs = tree.branch.repository._pack_collection
 
1263
        packs.ensure_loaded()
 
1264
        self.assertEqual([], packs.all_packs())
 
1265
 
 
1266
    def test_all_packs_one(self):
 
1267
        format = self.get_format()
 
1268
        tree = self.make_branch_and_tree('.', format=format)
 
1269
        tree.commit('start')
 
1270
        tree.lock_read()
 
1271
        self.addCleanup(tree.unlock)
 
1272
        packs = tree.branch.repository._pack_collection
 
1273
        packs.ensure_loaded()
 
1274
        self.assertEqual([
 
1275
            packs.get_pack_by_name(packs.names()[0])],
 
1276
            packs.all_packs())
 
1277
 
 
1278
    def test_all_packs_two(self):
 
1279
        format = self.get_format()
 
1280
        tree = self.make_branch_and_tree('.', format=format)
 
1281
        tree.commit('start')
 
1282
        tree.commit('continue')
 
1283
        tree.lock_read()
 
1284
        self.addCleanup(tree.unlock)
 
1285
        packs = tree.branch.repository._pack_collection
 
1286
        packs.ensure_loaded()
 
1287
        self.assertEqual([
 
1288
            packs.get_pack_by_name(packs.names()[0]),
 
1289
            packs.get_pack_by_name(packs.names()[1]),
 
1290
            ], packs.all_packs())
 
1291
 
 
1292
    def test_get_pack_by_name(self):
 
1293
        format = self.get_format()
 
1294
        tree = self.make_branch_and_tree('.', format=format)
 
1295
        tree.commit('start')
 
1296
        tree.lock_read()
 
1297
        self.addCleanup(tree.unlock)
 
1298
        packs = tree.branch.repository._pack_collection
 
1299
        packs.reset()
 
1300
        packs.ensure_loaded()
 
1301
        name = packs.names()[0]
 
1302
        pack_1 = packs.get_pack_by_name(name)
 
1303
        # the pack should be correctly initialised
 
1304
        sizes = packs._names[name]
 
1305
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1306
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1307
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1308
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
 
1309
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1310
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
 
1311
        # and the same instance should be returned on successive calls.
 
1312
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1313
 
 
1314
    def test_reload_pack_names_new_entry(self):
 
1315
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1316
        names = packs.names()
 
1317
        # Add a new pack file into the repository
 
1318
        rev4 = tree.commit('four')
 
1319
        new_names = tree.branch.repository._pack_collection.names()
 
1320
        new_name = set(new_names).difference(names)
 
1321
        self.assertEqual(1, len(new_name))
 
1322
        new_name = new_name.pop()
 
1323
        # The old collection hasn't noticed yet
 
1324
        self.assertEqual(names, packs.names())
 
1325
        self.assertTrue(packs.reload_pack_names())
 
1326
        self.assertEqual(new_names, packs.names())
 
1327
        # And the repository can access the new revision
 
1328
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1329
        self.assertFalse(packs.reload_pack_names())
 
1330
 
 
1331
    def test_reload_pack_names_added_and_removed(self):
 
1332
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1333
        names = packs.names()
 
1334
        # Now repack the whole thing
 
1335
        tree.branch.repository.pack()
 
1336
        new_names = tree.branch.repository._pack_collection.names()
 
1337
        # The other collection hasn't noticed yet
 
1338
        self.assertEqual(names, packs.names())
 
1339
        self.assertTrue(packs.reload_pack_names())
 
1340
        self.assertEqual(new_names, packs.names())
 
1341
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1342
        self.assertFalse(packs.reload_pack_names())
 
1343
 
 
1344
    def test_autopack_reloads_and_stops(self):
 
1345
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1346
        # After we have determined what needs to be autopacked, trigger a
 
1347
        # full-pack via the other repo which will cause us to re-evaluate and
 
1348
        # decide we don't need to do anything
 
1349
        orig_execute = packs._execute_pack_operations
 
1350
        def _munged_execute_pack_ops(*args, **kwargs):
 
1351
            tree.branch.repository.pack()
 
1352
            return orig_execute(*args, **kwargs)
 
1353
        packs._execute_pack_operations = _munged_execute_pack_ops
 
1354
        packs._max_pack_count = lambda x: 1
 
1355
        packs.pack_distribution = lambda x: [10]
 
1356
        self.assertFalse(packs.autopack())
 
1357
        self.assertEqual(1, len(packs.names()))
 
1358
        self.assertEqual(tree.branch.repository._pack_collection.names(),
 
1359
                         packs.names())
 
1360
 
 
1361
 
 
1362
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        # Equality must hold symmetrically, and != must be its exact negation.
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        # Inequality must hold symmetrically, and == must be its exact
        # negation.
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # Perturb each attribute in turn: equality must break while only one
        # side differs and be restored once both sides agree again.
        for attr in ('revision_index', 'inventory_index', 'text_index',
                     'signature_index', 'name', 'transport'):
            setattr(left, attr, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attr, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        # The on-disk file name is simply the pack name plus '.pack'.
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())

class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A freshly created NewPack exposes sensible initial attributes."""
        index_transport = self.get_transport('index')
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        # Abort on cleanup so the open write stream always gets closed.
        self.addCleanup(pack.abort)
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The pack must hold onto the exact transport objects it was given.
        for attr, transport in [('upload_transport', upload_transport),
                                ('index_transport', index_transport),
                                ('pack_transport', pack_transport)]:
            self.assertTrue(getattr(pack, attr) is transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)

class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packer moves the requested revisions' packs to the front."""
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        for revid, parent, text in [('B', 'A', 'new-content\n'),
                                    ('C', 'B', 'third-content\n'),
                                    ('D', 'C', 'fourth-content\n')]:
            builder.build_snapshot(revid, [parent],
                [('modify', ('f-id', text))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # Each commit above produced its own pack, so the collection now
        # holds four pack files corresponding to ['D', 'C', 'B', 'A'].
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Copying only the B & C revisions should promote their pack files
        # to the front of the .packs attribute, leaving the others in their
        # original relative order.
        expected_order = [packs[1], packs[2], packs[0], packs[3]]
        packer.pack()
        self.assertEqual(expected_order, packer.packs)

class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        # A fresh default repository supplies the collection under test.
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """Packs opened by an OptimisingPacker are tuned for size."""
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        # Every index of the new pack must have size optimisation enabled.
        for index in (new_pack.revision_index,
                      new_pack.inventory_index,
                      new_pack.text_index,
                      new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)

class TestCrossFormatPacks(TestCaseWithTransport):
 
1498
 
 
1499
    def log_pack(self, hint=None):
 
1500
        self.calls.append(('pack', hint))
 
1501
        self.orig_pack(hint=hint)
 
1502
        if self.expect_hint:
 
1503
            self.assertTrue(hint)
 
1504
 
 
1505
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
 
1506
        self.expect_hint = expect_pack_called
 
1507
        self.calls = []
 
1508
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1509
        source_tree.lock_write()
 
1510
        self.addCleanup(source_tree.unlock)
 
1511
        tip = source_tree.commit('foo')
 
1512
        target = self.make_repository('target', format=target_fmt)
 
1513
        target.lock_write()
 
1514
        self.addCleanup(target.unlock)
 
1515
        source = source_tree.branch.repository._get_source(target._format)
 
1516
        self.orig_pack = target.pack
 
1517
        target.pack = self.log_pack
 
1518
        search = target.search_missing_revision_ids(
 
1519
            source_tree.branch.repository, tip)
 
1520
        stream = source.get_stream(search)
 
1521
        from_format = source_tree.branch.repository._format
 
1522
        sink = target._get_sink()
 
1523
        sink.insert_stream(stream, from_format, [])
 
1524
        if expect_pack_called:
 
1525
            self.assertLength(1, self.calls)
 
1526
        else:
 
1527
            self.assertLength(0, self.calls)
 
1528
 
 
1529
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
 
1530
        self.expect_hint = expect_pack_called
 
1531
        self.calls = []
 
1532
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1533
        source_tree.lock_write()
 
1534
        self.addCleanup(source_tree.unlock)
 
1535
        tip = source_tree.commit('foo')
 
1536
        target = self.make_repository('target', format=target_fmt)
 
1537
        target.lock_write()
 
1538
        self.addCleanup(target.unlock)
 
1539
        source = source_tree.branch.repository
 
1540
        self.orig_pack = target.pack
 
1541
        target.pack = self.log_pack
 
1542
        target.fetch(source)
 
1543
        if expect_pack_called:
 
1544
            self.assertLength(1, self.calls)
 
1545
        else:
 
1546
            self.assertLength(0, self.calls)
 
1547
 
 
1548
    def test_sink_format_hint_no(self):
 
1549
        # When the target format says packing makes no difference, pack is not
 
1550
        # called.
 
1551
        self.run_stream('1.9', 'rich-root-pack', False)
 
1552
 
 
1553
    def test_sink_format_hint_yes(self):
 
1554
        # When the target format says packing makes a difference, pack is
 
1555
        # called.
 
1556
        self.run_stream('1.9', '2a', True)
 
1557
 
 
1558
    def test_sink_format_same_no(self):
 
1559
        # When the formats are the same, pack is not called.
 
1560
        self.run_stream('2a', '2a', False)
 
1561
 
 
1562
    def test_IDS_format_hint_no(self):
 
1563
        # When the target format says packing makes no difference, pack is not
 
1564
        # called.
 
1565
        self.run_fetch('1.9', 'rich-root-pack', False)
 
1566
 
 
1567
    def test_IDS_format_hint_yes(self):
 
1568
        # When the target format says packing makes a difference, pack is
 
1569
        # called.
 
1570
        self.run_fetch('1.9', '2a', True)
 
1571
 
 
1572
    def test_IDS_format_same_no(self):
 
1573
        # When the formats are the same, pack is not called.
 
1574
        self.run_fetch('2a', '2a', False)