/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-06-18 18:18:36 UTC
  • mto: This revision was merged to the branch mainline in revision 4461.
  • Revision ID: john@arbash-meinel.com-20090618181836-biodfkat9a8eyzjz
The new add_inventory_by_delta is returning a CHKInventory when mapping from NULL
Which is completely valid, but 'broke' one of the tests.
So to fix it, changed the test to use CHKInventories on both sides, and add an __eq__
member. The nice thing is that CHKInventory.__eq__ is fairly cheap, since it only
has to check the root keys.

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 
2
#
 
3
# This program is free software; you can redistribute it and/or modify
 
4
# it under the terms of the GNU General Public License as published by
 
5
# the Free Software Foundation; either version 2 of the License, or
 
6
# (at your option) any later version.
 
7
#
 
8
# This program is distributed in the hope that it will be useful,
 
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
11
# GNU General Public License for more details.
 
12
#
 
13
# You should have received a copy of the GNU General Public License
 
14
# along with this program; if not, write to the Free Software
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
16
 
 
17
"""Tests for the Repository facility that are not interface tests.
 
18
 
 
19
For interface tests see tests/per_repository/*.py.
 
20
 
 
21
For concrete class tests see this file, and for storage formats tests
 
22
also see this file.
 
23
"""
 
24
 
 
25
from stat import S_ISDIR
 
26
from StringIO import StringIO
 
27
 
 
28
import bzrlib
 
29
from bzrlib.errors import (NotBranchError,
 
30
                           NoSuchFile,
 
31
                           UnknownFormatError,
 
32
                           UnsupportedFormatError,
 
33
                           )
 
34
from bzrlib import graph
 
35
from bzrlib.branchbuilder import BranchBuilder
 
36
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
37
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
38
from bzrlib.repository import RepositoryFormat
 
39
from bzrlib.smart import server
 
40
from bzrlib.tests import (
 
41
    TestCase,
 
42
    TestCaseWithTransport,
 
43
    TestSkipped,
 
44
    test_knit,
 
45
    )
 
46
from bzrlib.transport import (
 
47
    fakenfs,
 
48
    get_transport,
 
49
    )
 
50
from bzrlib.transport.memory import MemoryServer
 
51
from bzrlib import (
 
52
    bencode,
 
53
    bzrdir,
 
54
    errors,
 
55
    inventory,
 
56
    osutils,
 
57
    progress,
 
58
    repository,
 
59
    revision as _mod_revision,
 
60
    symbol_versioning,
 
61
    upgrade,
 
62
    workingtree,
 
63
    )
 
64
from bzrlib.repofmt import (
 
65
    groupcompress_repo,
 
66
    knitrepo,
 
67
    pack_repo,
 
68
    weaverepo,
 
69
    )
 
70
 
 
71
 
 
72
class TestDefaultFormat(TestCase):
 
73
 
 
74
    def test_get_set_default_format(self):
 
75
        old_default = bzrdir.format_registry.get('default')
 
76
        private_default = old_default().repository_format.__class__
 
77
        old_format = repository.RepositoryFormat.get_default_format()
 
78
        self.assertTrue(isinstance(old_format, private_default))
 
79
        def make_sample_bzrdir():
 
80
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
 
81
            my_bzrdir.repository_format = SampleRepositoryFormat()
 
82
            return my_bzrdir
 
83
        bzrdir.format_registry.remove('default')
 
84
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
 
85
        bzrdir.format_registry.set_default('sample')
 
86
        # creating a repository should now create an instrumented dir.
 
87
        try:
 
88
            # the default branch format is used by the meta dir format
 
89
            # which is not the default bzrdir format at this point
 
90
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
 
91
            result = dir.create_repository()
 
92
            self.assertEqual(result, 'A bzr repository dir')
 
93
        finally:
 
94
            bzrdir.format_registry.remove('default')
 
95
            bzrdir.format_registry.remove('sample')
 
96
            bzrdir.format_registry.register('default', old_default, '')
 
97
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
98
                              old_format.__class__)
 
99
 
 
100
 
 
101
class SampleRepositoryFormat(repository.RepositoryFormat):
 
102
    """A sample format
 
103
 
 
104
    this format is initializable, unsupported to aid in testing the
 
105
    open and open(unsupported=True) routines.
 
106
    """
 
107
 
 
108
    def get_format_string(self):
 
109
        """See RepositoryFormat.get_format_string()."""
 
110
        return "Sample .bzr repository format."
 
111
 
 
112
    def initialize(self, a_bzrdir, shared=False):
 
113
        """Initialize a repository in a BzrDir"""
 
114
        t = a_bzrdir.get_repository_transport(self)
 
115
        t.put_bytes('format', self.get_format_string())
 
116
        return 'A bzr repository dir'
 
117
 
 
118
    def is_supported(self):
 
119
        return False
 
120
 
 
121
    def open(self, a_bzrdir, _found=False):
 
122
        return "opened repository."
 
123
 
 
124
 
 
125
class TestRepositoryFormat(TestCaseWithTransport):
 
126
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
 
127
 
 
128
    def test_find_format(self):
 
129
        # is the right format object found for a repository?
 
130
        # create a branch with a few known format objects.
 
131
        # this is not quite the same as
 
132
        self.build_tree(["foo/", "bar/"])
 
133
        def check_format(format, url):
 
134
            dir = format._matchingbzrdir.initialize(url)
 
135
            format.initialize(dir)
 
136
            t = get_transport(url)
 
137
            found_format = repository.RepositoryFormat.find_format(dir)
 
138
            self.failUnless(isinstance(found_format, format.__class__))
 
139
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
140
 
 
141
    def test_find_format_no_repository(self):
 
142
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
143
        self.assertRaises(errors.NoRepositoryPresent,
 
144
                          repository.RepositoryFormat.find_format,
 
145
                          dir)
 
146
 
 
147
    def test_find_format_unknown_format(self):
 
148
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
149
        SampleRepositoryFormat().initialize(dir)
 
150
        self.assertRaises(UnknownFormatError,
 
151
                          repository.RepositoryFormat.find_format,
 
152
                          dir)
 
153
 
 
154
    def test_register_unregister_format(self):
 
155
        format = SampleRepositoryFormat()
 
156
        # make a control dir
 
157
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
158
        # make a repo
 
159
        format.initialize(dir)
 
160
        # register a format for it.
 
161
        repository.RepositoryFormat.register_format(format)
 
162
        # which repository.Open will refuse (not supported)
 
163
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
 
164
        # but open(unsupported) will work
 
165
        self.assertEqual(format.open(dir), "opened repository.")
 
166
        # unregister the format
 
167
        repository.RepositoryFormat.unregister_format(format)
 
168
 
 
169
 
 
170
class TestFormat6(TestCaseWithTransport):
 
171
 
 
172
    def test_attribute__fetch_order(self):
 
173
        """Weaves need topological data insertion."""
 
174
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
175
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
176
        self.assertEqual('topological', repo._format._fetch_order)
 
177
 
 
178
    def test_attribute__fetch_uses_deltas(self):
 
179
        """Weaves do not reuse deltas."""
 
180
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
181
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
182
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
183
 
 
184
    def test_attribute__fetch_reconcile(self):
 
185
        """Weave repositories need a reconcile after fetch."""
 
186
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
187
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
188
        self.assertEqual(True, repo._format._fetch_reconcile)
 
189
 
 
190
    def test_no_ancestry_weave(self):
 
191
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
192
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
193
        # We no longer need to create the ancestry.weave file
 
194
        # since it is *never* used.
 
195
        self.assertRaises(NoSuchFile,
 
196
                          control.transport.get,
 
197
                          'ancestry.weave')
 
198
 
 
199
    def test_supports_external_lookups(self):
 
200
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
201
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
202
        self.assertFalse(repo._format.supports_external_lookups)
 
203
 
 
204
 
 
205
class TestFormat7(TestCaseWithTransport):
 
206
 
 
207
    def test_attribute__fetch_order(self):
 
208
        """Weaves need topological data insertion."""
 
209
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
210
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
211
        self.assertEqual('topological', repo._format._fetch_order)
 
212
 
 
213
    def test_attribute__fetch_uses_deltas(self):
 
214
        """Weaves do not reuse deltas."""
 
215
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
216
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
217
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
218
 
 
219
    def test_attribute__fetch_reconcile(self):
 
220
        """Weave repositories need a reconcile after fetch."""
 
221
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
222
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
223
        self.assertEqual(True, repo._format._fetch_reconcile)
 
224
 
 
225
    def test_disk_layout(self):
 
226
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
227
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
228
        # in case of side effects of locking.
 
229
        repo.lock_write()
 
230
        repo.unlock()
 
231
        # we want:
 
232
        # format 'Bazaar-NG Repository format 7'
 
233
        # lock ''
 
234
        # inventory.weave == empty_weave
 
235
        # empty revision-store directory
 
236
        # empty weaves directory
 
237
        t = control.get_repository_transport(None)
 
238
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
239
                             t.get('format').read())
 
240
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
241
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
242
        self.assertEqualDiff('# bzr weave file v5\n'
 
243
                             'w\n'
 
244
                             'W\n',
 
245
                             t.get('inventory.weave').read())
 
246
        # Creating a file with id Foo:Bar results in a non-escaped file name on
 
247
        # disk.
 
248
        control.create_branch()
 
249
        tree = control.create_workingtree()
 
250
        tree.add(['foo'], ['Foo:Bar'], ['file'])
 
251
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
 
252
        tree.commit('first post', rev_id='first')
 
253
        self.assertEqualDiff(
 
254
            '# bzr weave file v5\n'
 
255
            'i\n'
 
256
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
 
257
            'n first\n'
 
258
            '\n'
 
259
            'w\n'
 
260
            '{ 0\n'
 
261
            '. content\n'
 
262
            '}\n'
 
263
            'W\n',
 
264
            t.get('weaves/74/Foo%3ABar.weave').read())
 
265
 
 
266
    def test_shared_disk_layout(self):
 
267
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
268
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
269
        # we want:
 
270
        # format 'Bazaar-NG Repository format 7'
 
271
        # inventory.weave == empty_weave
 
272
        # empty revision-store directory
 
273
        # empty weaves directory
 
274
        # a 'shared-storage' marker file.
 
275
        # lock is not present when unlocked
 
276
        t = control.get_repository_transport(None)
 
277
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
278
                             t.get('format').read())
 
279
        self.assertEqualDiff('', t.get('shared-storage').read())
 
280
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
281
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
282
        self.assertEqualDiff('# bzr weave file v5\n'
 
283
                             'w\n'
 
284
                             'W\n',
 
285
                             t.get('inventory.weave').read())
 
286
        self.assertFalse(t.has('branch-lock'))
 
287
 
 
288
    def test_creates_lockdir(self):
 
289
        """Make sure it appears to be controlled by a LockDir existence"""
 
290
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
291
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
292
        t = control.get_repository_transport(None)
 
293
        # TODO: Should check there is a 'lock' toplevel directory,
 
294
        # regardless of contents
 
295
        self.assertFalse(t.has('lock/held/info'))
 
296
        repo.lock_write()
 
297
        try:
 
298
            self.assertTrue(t.has('lock/held/info'))
 
299
        finally:
 
300
            # unlock so we don't get a warning about failing to do so
 
301
            repo.unlock()
 
302
 
 
303
    def test_uses_lockdir(self):
 
304
        """repo format 7 actually locks on lockdir"""
 
305
        base_url = self.get_url()
 
306
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
 
307
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
308
        t = control.get_repository_transport(None)
 
309
        repo.lock_write()
 
310
        repo.unlock()
 
311
        del repo
 
312
        # make sure the same lock is created by opening it
 
313
        repo = repository.Repository.open(base_url)
 
314
        repo.lock_write()
 
315
        self.assertTrue(t.has('lock/held/info'))
 
316
        repo.unlock()
 
317
        self.assertFalse(t.has('lock/held/info'))
 
318
 
 
319
    def test_shared_no_tree_disk_layout(self):
 
320
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
321
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
322
        repo.set_make_working_trees(False)
 
323
        # we want:
 
324
        # format 'Bazaar-NG Repository format 7'
 
325
        # lock ''
 
326
        # inventory.weave == empty_weave
 
327
        # empty revision-store directory
 
328
        # empty weaves directory
 
329
        # a 'shared-storage' marker file.
 
330
        t = control.get_repository_transport(None)
 
331
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
332
                             t.get('format').read())
 
333
        ## self.assertEqualDiff('', t.get('lock').read())
 
334
        self.assertEqualDiff('', t.get('shared-storage').read())
 
335
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
336
        repo.set_make_working_trees(True)
 
337
        self.assertFalse(t.has('no-working-trees'))
 
338
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
339
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
340
        self.assertEqualDiff('# bzr weave file v5\n'
 
341
                             'w\n'
 
342
                             'W\n',
 
343
                             t.get('inventory.weave').read())
 
344
 
 
345
    def test_supports_external_lookups(self):
 
346
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
347
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
348
        self.assertFalse(repo._format.supports_external_lookups)
 
349
 
 
350
 
 
351
class TestFormatKnit1(TestCaseWithTransport):
 
352
 
 
353
    def test_attribute__fetch_order(self):
 
354
        """Knits need topological data insertion."""
 
355
        repo = self.make_repository('.',
 
356
                format=bzrdir.format_registry.get('knit')())
 
357
        self.assertEqual('topological', repo._format._fetch_order)
 
358
 
 
359
    def test_attribute__fetch_uses_deltas(self):
 
360
        """Knits reuse deltas."""
 
361
        repo = self.make_repository('.',
 
362
                format=bzrdir.format_registry.get('knit')())
 
363
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
364
 
 
365
    def test_disk_layout(self):
 
366
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
367
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
 
368
        # in case of side effects of locking.
 
369
        repo.lock_write()
 
370
        repo.unlock()
 
371
        # we want:
 
372
        # format 'Bazaar-NG Knit Repository Format 1'
 
373
        # lock: is a directory
 
374
        # inventory.weave == empty_weave
 
375
        # empty revision-store directory
 
376
        # empty weaves directory
 
377
        t = control.get_repository_transport(None)
 
378
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
 
379
                             t.get('format').read())
 
380
        # XXX: no locks left when unlocked at the moment
 
381
        # self.assertEqualDiff('', t.get('lock').read())
 
382
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
 
383
        self.check_knits(t)
 
384
        # Check per-file knits.
 
385
        branch = control.create_branch()
 
386
        tree = control.create_workingtree()
 
387
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
388
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
389
        tree.commit('1st post', rev_id='foo')
 
390
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
391
            '\nfoo fulltext 0 81  :')
 
392
 
 
393
    def assertHasKnit(self, t, knit_name, extra_content=''):
 
394
        """Assert that knit_name exists on t."""
 
395
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
 
396
                             t.get(knit_name + '.kndx').read())
 
397
 
 
398
    def check_knits(self, t):
 
399
        """check knit content for a repository."""
 
400
        self.assertHasKnit(t, 'inventory')
 
401
        self.assertHasKnit(t, 'revisions')
 
402
        self.assertHasKnit(t, 'signatures')
 
403
 
 
404
    def test_shared_disk_layout(self):
 
405
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
406
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
407
        # we want:
 
408
        # format 'Bazaar-NG Knit Repository Format 1'
 
409
        # lock: is a directory
 
410
        # inventory.weave == empty_weave
 
411
        # empty revision-store directory
 
412
        # empty weaves directory
 
413
        # a 'shared-storage' marker file.
 
414
        t = control.get_repository_transport(None)
 
415
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
 
416
                             t.get('format').read())
 
417
        # XXX: no locks left when unlocked at the moment
 
418
        # self.assertEqualDiff('', t.get('lock').read())
 
419
        self.assertEqualDiff('', t.get('shared-storage').read())
 
420
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
 
421
        self.check_knits(t)
 
422
 
 
423
    def test_shared_no_tree_disk_layout(self):
 
424
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
425
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
426
        repo.set_make_working_trees(False)
 
427
        # we want:
 
428
        # format 'Bazaar-NG Knit Repository Format 1'
 
429
        # lock ''
 
430
        # inventory.weave == empty_weave
 
431
        # empty revision-store directory
 
432
        # empty weaves directory
 
433
        # a 'shared-storage' marker file.
 
434
        t = control.get_repository_transport(None)
 
435
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
 
436
                             t.get('format').read())
 
437
        # XXX: no locks left when unlocked at the moment
 
438
        # self.assertEqualDiff('', t.get('lock').read())
 
439
        self.assertEqualDiff('', t.get('shared-storage').read())
 
440
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
441
        repo.set_make_working_trees(True)
 
442
        self.assertFalse(t.has('no-working-trees'))
 
443
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
 
444
        self.check_knits(t)
 
445
 
 
446
    def test_deserialise_sets_root_revision(self):
 
447
        """We must have a inventory.root.revision
 
448
 
 
449
        Old versions of the XML5 serializer did not set the revision_id for
 
450
        the whole inventory. So we grab the one from the expected text. Which
 
451
        is valid when the api is not being abused.
 
452
        """
 
453
        repo = self.make_repository('.',
 
454
                format=bzrdir.format_registry.get('knit')())
 
455
        inv_xml = '<inventory format="5">\n</inventory>\n'
 
456
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
457
        self.assertEqual('test-rev-id', inv.root.revision)
 
458
 
 
459
    def test_deserialise_uses_global_revision_id(self):
 
460
        """If it is set, then we re-use the global revision id"""
 
461
        repo = self.make_repository('.',
 
462
                format=bzrdir.format_registry.get('knit')())
 
463
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
 
464
                   '</inventory>\n')
 
465
        # Arguably, the deserialise_inventory should detect a mismatch, and
 
466
        # raise an error, rather than silently using one revision_id over the
 
467
        # other.
 
468
        self.assertRaises(AssertionError, repo.deserialise_inventory,
 
469
            'test-rev-id', inv_xml)
 
470
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
 
471
        self.assertEqual('other-rev-id', inv.root.revision)
 
472
 
 
473
    def test_supports_external_lookups(self):
 
474
        repo = self.make_repository('.',
 
475
                format=bzrdir.format_registry.get('knit')())
 
476
        self.assertFalse(repo._format.supports_external_lookups)
 
477
 
 
478
 
 
479
class DummyRepository(object):
 
480
    """A dummy repository for testing."""
 
481
 
 
482
    _format = None
 
483
    _serializer = None
 
484
 
 
485
    def supports_rich_root(self):
 
486
        return False
 
487
 
 
488
    def get_graph(self):
 
489
        raise NotImplementedError
 
490
 
 
491
    def get_parent_map(self, revision_ids):
 
492
        raise NotImplementedError
 
493
 
 
494
 
 
495
class InterDummy(repository.InterRepository):
 
496
    """An inter-repository optimised code path for DummyRepository.
 
497
 
 
498
    This is for use during testing where we use DummyRepository as repositories
 
499
    so that none of the default regsitered inter-repository classes will
 
500
    MATCH.
 
501
    """
 
502
 
 
503
    @staticmethod
 
504
    def is_compatible(repo_source, repo_target):
 
505
        """InterDummy is compatible with DummyRepository."""
 
506
        return (isinstance(repo_source, DummyRepository) and
 
507
            isinstance(repo_target, DummyRepository))
 
508
 
 
509
 
 
510
class TestInterRepository(TestCaseWithTransport):
 
511
 
 
512
    def test_get_default_inter_repository(self):
 
513
        # test that the InterRepository.get(repo_a, repo_b) probes
 
514
        # for a inter_repo class where is_compatible(repo_a, repo_b) returns
 
515
        # true and returns a default inter_repo otherwise.
 
516
        # This also tests that the default registered optimised interrepository
 
517
        # classes do not barf inappropriately when a surprising repository type
 
518
        # is handed to them.
 
519
        dummy_a = DummyRepository()
 
520
        dummy_b = DummyRepository()
 
521
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
 
522
 
 
523
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
 
524
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
 
525
 
 
526
        The effective default is now InterSameDataRepository because there is
 
527
        no actual sane default in the presence of incompatible data models.
 
528
        """
 
529
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
 
530
        self.assertEqual(repository.InterSameDataRepository,
 
531
                         inter_repo.__class__)
 
532
        self.assertEqual(repo_a, inter_repo.source)
 
533
        self.assertEqual(repo_b, inter_repo.target)
 
534
 
 
535
    def test_register_inter_repository_class(self):
 
536
        # test that a optimised code path provider - a
 
537
        # InterRepository subclass can be registered and unregistered
 
538
        # and that it is correctly selected when given a repository
 
539
        # pair that it returns true on for the is_compatible static method
 
540
        # check
 
541
        dummy_a = DummyRepository()
 
542
        dummy_b = DummyRepository()
 
543
        repo = self.make_repository('.')
 
544
        # hack dummies to look like repo somewhat.
 
545
        dummy_a._serializer = repo._serializer
 
546
        dummy_b._serializer = repo._serializer
 
547
        repository.InterRepository.register_optimiser(InterDummy)
 
548
        try:
 
549
            # we should get the default for something InterDummy returns False
 
550
            # to
 
551
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
 
552
            self.assertGetsDefaultInterRepository(dummy_a, repo)
 
553
            # and we should get an InterDummy for a pair it 'likes'
 
554
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
 
555
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
 
556
            self.assertEqual(InterDummy, inter_repo.__class__)
 
557
            self.assertEqual(dummy_a, inter_repo.source)
 
558
            self.assertEqual(dummy_b, inter_repo.target)
 
559
        finally:
 
560
            repository.InterRepository.unregister_optimiser(InterDummy)
 
561
        # now we should get the default InterRepository object again.
 
562
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
 
563
 
 
564
 
 
565
class TestInterWeaveRepo(TestCaseWithTransport):
 
566
 
 
567
    def test_is_compatible_and_registered(self):
 
568
        # InterWeaveRepo is compatible when either side
 
569
        # is a format 5/6/7 branch
 
570
        from bzrlib.repofmt import knitrepo, weaverepo
 
571
        formats = [weaverepo.RepositoryFormat5(),
 
572
                   weaverepo.RepositoryFormat6(),
 
573
                   weaverepo.RepositoryFormat7()]
 
574
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
575
                                knitrepo.RepositoryFormatKnit1(),
 
576
                                ]
 
577
        repo_a = self.make_repository('a')
 
578
        repo_b = self.make_repository('b')
 
579
        is_compatible = repository.InterWeaveRepo.is_compatible
 
580
        for source in incompatible_formats:
 
581
            # force incompatible left then right
 
582
            repo_a._format = source
 
583
            repo_b._format = formats[0]
 
584
            self.assertFalse(is_compatible(repo_a, repo_b))
 
585
            self.assertFalse(is_compatible(repo_b, repo_a))
 
586
        for source in formats:
 
587
            repo_a._format = source
 
588
            for target in formats:
 
589
                repo_b._format = target
 
590
                self.assertTrue(is_compatible(repo_a, repo_b))
 
591
        self.assertEqual(repository.InterWeaveRepo,
 
592
                         repository.InterRepository.get(repo_a,
 
593
                                                        repo_b).__class__)
 
594
 
 
595
 
 
596
class TestRepositoryConverter(TestCaseWithTransport):
 
597
 
 
598
    def test_convert_empty(self):
 
599
        t = get_transport(self.get_url('.'))
 
600
        t.mkdir('repository')
 
601
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
 
602
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
603
        target_format = knitrepo.RepositoryFormatKnit1()
 
604
        converter = repository.CopyConverter(target_format)
 
605
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
 
606
        try:
 
607
            converter.convert(repo, pb)
 
608
        finally:
 
609
            pb.finished()
 
610
        repo = repo_dir.open_repository()
 
611
        self.assertTrue(isinstance(target_format, repo._format.__class__))
 
612
 
 
613
 
 
614
class TestMisc(TestCase):
 
615
 
 
616
    def test_unescape_xml(self):
 
617
        """We get some kind of error when malformed entities are passed"""
 
618
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
619
 
 
620
 
 
621
class TestRepositoryFormatKnit3(TestCaseWithTransport):
 
622
 
 
623
    def test_attribute__fetch_order(self):
 
624
        """Knits need topological data insertion."""
 
625
        format = bzrdir.BzrDirMetaFormat1()
 
626
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
627
        repo = self.make_repository('.', format=format)
 
628
        self.assertEqual('topological', repo._format._fetch_order)
 
629
 
 
630
    def test_attribute__fetch_uses_deltas(self):
 
631
        """Knits reuse deltas."""
 
632
        format = bzrdir.BzrDirMetaFormat1()
 
633
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
634
        repo = self.make_repository('.', format=format)
 
635
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
636
 
 
637
    def test_convert(self):
 
638
        """Ensure the upgrade adds weaves for roots"""
 
639
        format = bzrdir.BzrDirMetaFormat1()
 
640
        format.repository_format = knitrepo.RepositoryFormatKnit1()
 
641
        tree = self.make_branch_and_tree('.', format)
 
642
        tree.commit("Dull commit", rev_id="dull")
 
643
        revision_tree = tree.branch.repository.revision_tree('dull')
 
644
        revision_tree.lock_read()
 
645
        try:
 
646
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
647
                revision_tree.inventory.root.file_id)
 
648
        finally:
 
649
            revision_tree.unlock()
 
650
        format = bzrdir.BzrDirMetaFormat1()
 
651
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
652
        upgrade.Convert('.', format)
 
653
        tree = workingtree.WorkingTree.open('.')
 
654
        revision_tree = tree.branch.repository.revision_tree('dull')
 
655
        revision_tree.lock_read()
 
656
        try:
 
657
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
658
        finally:
 
659
            revision_tree.unlock()
 
660
        tree.commit("Another dull commit", rev_id='dull2')
 
661
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
662
        revision_tree.lock_read()
 
663
        self.addCleanup(revision_tree.unlock)
 
664
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
665
 
 
666
    def test_supports_external_lookups(self):
 
667
        format = bzrdir.BzrDirMetaFormat1()
 
668
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
669
        repo = self.make_repository('.', format=format)
 
670
        self.assertFalse(repo._format.supports_external_lookups)
 
671
 
 
672
 
 
673
class TestDevelopment6(TestCaseWithTransport):
 
674
 
 
675
    def test_inventories_use_chk_map_with_parent_base_dict(self):
 
676
        tree = self.make_branch_and_tree('repo', format="development6-rich-root")
 
677
        revid = tree.commit("foo")
 
678
        tree.lock_read()
 
679
        self.addCleanup(tree.unlock)
 
680
        inv = tree.branch.repository.get_inventory(revid)
 
681
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
 
682
        inv.parent_id_basename_to_file_id._ensure_root()
 
683
        inv.id_to_entry._ensure_root()
 
684
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
 
685
        self.assertEqual(65536,
 
686
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
687
 
 
688
 
 
689
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
 
690
    """Tests for _find_parent_ids_of_revisions."""
 
691
 
 
692
    def setUp(self):
 
693
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
 
694
        self.builder = self.make_branch_builder('source',
 
695
            format='development6-rich-root')
 
696
        self.builder.start_series()
 
697
        self.builder.build_snapshot('initial', None,
 
698
            [('add', ('', 'tree-root', 'directory', None))])
 
699
        self.repo = self.builder.get_branch().repository
 
700
        self.addCleanup(self.builder.finish_series)
 
701
 
 
702
    def assertParentIds(self, expected_result, rev_set):
 
703
        self.assertEqual(sorted(expected_result),
 
704
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
705
 
 
706
    def test_simple(self):
 
707
        self.builder.build_snapshot('revid1', None, [])
 
708
        self.builder.build_snapshot('revid2', ['revid1'], [])
 
709
        rev_set = ['revid2']
 
710
        self.assertParentIds(['revid1'], rev_set)
 
711
 
 
712
    def test_not_first_parent(self):
 
713
        self.builder.build_snapshot('revid1', None, [])
 
714
        self.builder.build_snapshot('revid2', ['revid1'], [])
 
715
        self.builder.build_snapshot('revid3', ['revid2'], [])
 
716
        rev_set = ['revid3', 'revid2']
 
717
        self.assertParentIds(['revid1'], rev_set)
 
718
 
 
719
    def test_not_null(self):
 
720
        rev_set = ['initial']
 
721
        self.assertParentIds([], rev_set)
 
722
 
 
723
    def test_not_null_set(self):
 
724
        self.builder.build_snapshot('revid1', None, [])
 
725
        rev_set = [_mod_revision.NULL_REVISION]
 
726
        self.assertParentIds([], rev_set)
 
727
 
 
728
    def test_ghost(self):
 
729
        self.builder.build_snapshot('revid1', None, [])
 
730
        rev_set = ['ghost', 'revid1']
 
731
        self.assertParentIds(['initial'], rev_set)
 
732
 
 
733
    def test_ghost_parent(self):
 
734
        self.builder.build_snapshot('revid1', None, [])
 
735
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
 
736
        rev_set = ['revid2', 'revid1']
 
737
        self.assertParentIds(['ghost', 'initial'], rev_set)
 
738
 
 
739
    def test_righthand_parent(self):
 
740
        self.builder.build_snapshot('revid1', None, [])
 
741
        self.builder.build_snapshot('revid2a', ['revid1'], [])
 
742
        self.builder.build_snapshot('revid2b', ['revid1'], [])
 
743
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
 
744
        rev_set = ['revid3', 'revid2a']
 
745
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
746
 
 
747
 
 
748
class TestWithBrokenRepo(TestCaseWithTransport):
 
749
    """These tests seem to be more appropriate as interface tests?"""
 
750
 
 
751
    def make_broken_repository(self):
 
752
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
 
753
        # parent references" branch which is due to land in bzr.dev soon.  Once
 
754
        # it does, this duplication should be removed.
 
755
        repo = self.make_repository('broken-repo')
 
756
        cleanups = []
 
757
        try:
 
758
            repo.lock_write()
 
759
            cleanups.append(repo.unlock)
 
760
            repo.start_write_group()
 
761
            cleanups.append(repo.commit_write_group)
 
762
            # make rev1a: A well-formed revision, containing 'file1'
 
763
            inv = inventory.Inventory(revision_id='rev1a')
 
764
            inv.root.revision = 'rev1a'
 
765
            self.add_file(repo, inv, 'file1', 'rev1a', [])
 
766
            repo.add_inventory('rev1a', inv, [])
 
767
            revision = _mod_revision.Revision('rev1a',
 
768
                committer='jrandom@example.com', timestamp=0,
 
769
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
 
770
            repo.add_revision('rev1a',revision, inv)
 
771
 
 
772
            # make rev1b, which has no Revision, but has an Inventory, and
 
773
            # file1
 
774
            inv = inventory.Inventory(revision_id='rev1b')
 
775
            inv.root.revision = 'rev1b'
 
776
            self.add_file(repo, inv, 'file1', 'rev1b', [])
 
777
            repo.add_inventory('rev1b', inv, [])
 
778
 
 
779
            # make rev2, with file1 and file2
 
780
            # file2 is sane
 
781
            # file1 has 'rev1b' as an ancestor, even though this is not
 
782
            # mentioned by 'rev1a', making it an unreferenced ancestor
 
783
            inv = inventory.Inventory()
 
784
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
 
785
            self.add_file(repo, inv, 'file2', 'rev2', [])
 
786
            self.add_revision(repo, 'rev2', inv, ['rev1a'])
 
787
 
 
788
            # make ghost revision rev1c
 
789
            inv = inventory.Inventory()
 
790
            self.add_file(repo, inv, 'file2', 'rev1c', [])
 
791
 
 
792
            # make rev3 with file2
 
793
            # file2 refers to 'rev1c', which is a ghost in this repository, so
 
794
            # file2 cannot have rev1c as its ancestor.
 
795
            inv = inventory.Inventory()
 
796
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
 
797
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
 
798
            return repo
 
799
        finally:
 
800
            for cleanup in reversed(cleanups):
 
801
                cleanup()
 
802
 
 
803
    def add_revision(self, repo, revision_id, inv, parent_ids):
 
804
        inv.revision_id = revision_id
 
805
        inv.root.revision = revision_id
 
806
        repo.add_inventory(revision_id, inv, parent_ids)
 
807
        revision = _mod_revision.Revision(revision_id,
 
808
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
 
809
            timezone=0, message='foo', parent_ids=parent_ids)
 
810
        repo.add_revision(revision_id,revision, inv)
 
811
 
 
812
    def add_file(self, repo, inv, filename, revision, parents):
 
813
        file_id = filename + '-id'
 
814
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
 
815
        entry.revision = revision
 
816
        entry.text_size = 0
 
817
        inv.add(entry)
 
818
        text_key = (file_id, revision)
 
819
        parent_keys = [(file_id, parent) for parent in parents]
 
820
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
 
821
 
 
822
    def test_insert_from_broken_repo(self):
 
823
        """Inserting a data stream from a broken repository won't silently
 
824
        corrupt the target repository.
 
825
        """
 
826
        broken_repo = self.make_broken_repository()
 
827
        empty_repo = self.make_repository('empty-repo')
 
828
        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
 
829
                          empty_repo.fetch, broken_repo)
 
830
 
 
831
 
 
832
class TestRepositoryPackCollection(TestCaseWithTransport):
 
833
 
 
834
    def get_format(self):
 
835
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
836
 
 
837
    def get_packs(self):
 
838
        format = self.get_format()
 
839
        repo = self.make_repository('.', format=format)
 
840
        return repo._pack_collection
 
841
 
 
842
    def make_packs_and_alt_repo(self, write_lock=False):
 
843
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
844
        tree = self.make_branch_and_tree('.')
 
845
        tree.lock_write()
 
846
        self.addCleanup(tree.unlock)
 
847
        rev1 = tree.commit('one')
 
848
        rev2 = tree.commit('two')
 
849
        rev3 = tree.commit('three')
 
850
        r = repository.Repository.open('.')
 
851
        if write_lock:
 
852
            r.lock_write()
 
853
        else:
 
854
            r.lock_read()
 
855
        self.addCleanup(r.unlock)
 
856
        packs = r._pack_collection
 
857
        packs.ensure_loaded()
 
858
        return tree, r, packs, [rev1, rev2, rev3]
 
859
 
 
860
    def test__max_pack_count(self):
 
861
        """The maximum pack count is a function of the number of revisions."""
 
862
        # no revisions - one pack, so that we can have a revision free repo
 
863
        # without it blowing up
 
864
        packs = self.get_packs()
 
865
        self.assertEqual(1, packs._max_pack_count(0))
 
866
        # after that the sum of the digits, - check the first 1-9
 
867
        self.assertEqual(1, packs._max_pack_count(1))
 
868
        self.assertEqual(2, packs._max_pack_count(2))
 
869
        self.assertEqual(3, packs._max_pack_count(3))
 
870
        self.assertEqual(4, packs._max_pack_count(4))
 
871
        self.assertEqual(5, packs._max_pack_count(5))
 
872
        self.assertEqual(6, packs._max_pack_count(6))
 
873
        self.assertEqual(7, packs._max_pack_count(7))
 
874
        self.assertEqual(8, packs._max_pack_count(8))
 
875
        self.assertEqual(9, packs._max_pack_count(9))
 
876
        # check the boundary cases with two digits for the next decade
 
877
        self.assertEqual(1, packs._max_pack_count(10))
 
878
        self.assertEqual(2, packs._max_pack_count(11))
 
879
        self.assertEqual(10, packs._max_pack_count(19))
 
880
        self.assertEqual(2, packs._max_pack_count(20))
 
881
        self.assertEqual(3, packs._max_pack_count(21))
 
882
        # check some arbitrary big numbers
 
883
        self.assertEqual(25, packs._max_pack_count(112894))
 
884
 
 
885
    def test_pack_distribution_zero(self):
 
886
        packs = self.get_packs()
 
887
        self.assertEqual([0], packs.pack_distribution(0))
 
888
 
 
889
    def test_ensure_loaded_unlocked(self):
 
890
        packs = self.get_packs()
 
891
        self.assertRaises(errors.ObjectNotLocked,
 
892
                          packs.ensure_loaded)
 
893
 
 
894
    def test_pack_distribution_one_to_nine(self):
 
895
        packs = self.get_packs()
 
896
        self.assertEqual([1],
 
897
            packs.pack_distribution(1))
 
898
        self.assertEqual([1, 1],
 
899
            packs.pack_distribution(2))
 
900
        self.assertEqual([1, 1, 1],
 
901
            packs.pack_distribution(3))
 
902
        self.assertEqual([1, 1, 1, 1],
 
903
            packs.pack_distribution(4))
 
904
        self.assertEqual([1, 1, 1, 1, 1],
 
905
            packs.pack_distribution(5))
 
906
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
907
            packs.pack_distribution(6))
 
908
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
909
            packs.pack_distribution(7))
 
910
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
911
            packs.pack_distribution(8))
 
912
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
913
            packs.pack_distribution(9))
 
914
 
 
915
    def test_pack_distribution_stable_at_boundaries(self):
 
916
        """When there are multi-rev packs the counts are stable."""
 
917
        packs = self.get_packs()
 
918
        # in 10s:
 
919
        self.assertEqual([10], packs.pack_distribution(10))
 
920
        self.assertEqual([10, 1], packs.pack_distribution(11))
 
921
        self.assertEqual([10, 10], packs.pack_distribution(20))
 
922
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
 
923
        # 100s
 
924
        self.assertEqual([100], packs.pack_distribution(100))
 
925
        self.assertEqual([100, 1], packs.pack_distribution(101))
 
926
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
 
927
        self.assertEqual([100, 100], packs.pack_distribution(200))
 
928
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
 
929
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
930
 
 
931
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
 
932
        packs = self.get_packs()
 
933
        existing_packs = [(2000, "big"), (9, "medium")]
 
934
        # rev count - 2009 -> 2x1000 + 9x1
 
935
        pack_operations = packs.plan_autopack_combinations(
 
936
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
 
937
        self.assertEqual([], pack_operations)
 
938
 
 
939
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
 
940
        packs = self.get_packs()
 
941
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
 
942
        # rev count - 2010 -> 2x1000 + 1x10
 
943
        pack_operations = packs.plan_autopack_combinations(
 
944
            existing_packs, [1000, 1000, 10])
 
945
        self.assertEqual([], pack_operations)
 
946
 
 
947
    def test_plan_pack_operations_2010_combines_smallest_two(self):
 
948
        packs = self.get_packs()
 
949
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
 
950
            (1, "single1")]
 
951
        # rev count - 2010 -> 2x1000 + 1x10 (3)
 
952
        pack_operations = packs.plan_autopack_combinations(
 
953
            existing_packs, [1000, 1000, 10])
 
954
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
955
 
 
956
    def test_plan_pack_operations_creates_a_single_op(self):
 
957
        packs = self.get_packs()
 
958
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
959
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
960
        # rev count 150 -> 1x100 and 5x10
 
961
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
962
        # be combined into a single 120 size pack, and the 6 & 4 would
 
963
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
964
        # we save a pack file with no increased I/O by putting them into the
 
965
        # same file.
 
966
        distribution = packs.pack_distribution(150)
 
967
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
968
                                                           distribution)
 
969
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
970
 
 
971
    def test_all_packs_none(self):
 
972
        format = self.get_format()
 
973
        tree = self.make_branch_and_tree('.', format=format)
 
974
        tree.lock_read()
 
975
        self.addCleanup(tree.unlock)
 
976
        packs = tree.branch.repository._pack_collection
 
977
        packs.ensure_loaded()
 
978
        self.assertEqual([], packs.all_packs())
 
979
 
 
980
    def test_all_packs_one(self):
 
981
        format = self.get_format()
 
982
        tree = self.make_branch_and_tree('.', format=format)
 
983
        tree.commit('start')
 
984
        tree.lock_read()
 
985
        self.addCleanup(tree.unlock)
 
986
        packs = tree.branch.repository._pack_collection
 
987
        packs.ensure_loaded()
 
988
        self.assertEqual([
 
989
            packs.get_pack_by_name(packs.names()[0])],
 
990
            packs.all_packs())
 
991
 
 
992
    def test_all_packs_two(self):
 
993
        format = self.get_format()
 
994
        tree = self.make_branch_and_tree('.', format=format)
 
995
        tree.commit('start')
 
996
        tree.commit('continue')
 
997
        tree.lock_read()
 
998
        self.addCleanup(tree.unlock)
 
999
        packs = tree.branch.repository._pack_collection
 
1000
        packs.ensure_loaded()
 
1001
        self.assertEqual([
 
1002
            packs.get_pack_by_name(packs.names()[0]),
 
1003
            packs.get_pack_by_name(packs.names()[1]),
 
1004
            ], packs.all_packs())
 
1005
 
 
1006
    def test_get_pack_by_name(self):
 
1007
        format = self.get_format()
 
1008
        tree = self.make_branch_and_tree('.', format=format)
 
1009
        tree.commit('start')
 
1010
        tree.lock_read()
 
1011
        self.addCleanup(tree.unlock)
 
1012
        packs = tree.branch.repository._pack_collection
 
1013
        packs.reset()
 
1014
        packs.ensure_loaded()
 
1015
        name = packs.names()[0]
 
1016
        pack_1 = packs.get_pack_by_name(name)
 
1017
        # the pack should be correctly initialised
 
1018
        sizes = packs._names[name]
 
1019
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1020
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1021
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1022
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
 
1023
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1024
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
 
1025
        # and the same instance should be returned on successive calls.
 
1026
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1027
 
 
1028
    def test_reload_pack_names_new_entry(self):
 
1029
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1030
        names = packs.names()
 
1031
        # Add a new pack file into the repository
 
1032
        rev4 = tree.commit('four')
 
1033
        new_names = tree.branch.repository._pack_collection.names()
 
1034
        new_name = set(new_names).difference(names)
 
1035
        self.assertEqual(1, len(new_name))
 
1036
        new_name = new_name.pop()
 
1037
        # The old collection hasn't noticed yet
 
1038
        self.assertEqual(names, packs.names())
 
1039
        self.assertTrue(packs.reload_pack_names())
 
1040
        self.assertEqual(new_names, packs.names())
 
1041
        # And the repository can access the new revision
 
1042
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1043
        self.assertFalse(packs.reload_pack_names())
 
1044
 
 
1045
    def test_reload_pack_names_added_and_removed(self):
 
1046
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1047
        names = packs.names()
 
1048
        # Now repack the whole thing
 
1049
        tree.branch.repository.pack()
 
1050
        new_names = tree.branch.repository._pack_collection.names()
 
1051
        # The other collection hasn't noticed yet
 
1052
        self.assertEqual(names, packs.names())
 
1053
        self.assertTrue(packs.reload_pack_names())
 
1054
        self.assertEqual(new_names, packs.names())
 
1055
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1056
        self.assertFalse(packs.reload_pack_names())
 
1057
 
 
1058
    def test_autopack_reloads_and_stops(self):
 
1059
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1060
        # After we have determined what needs to be autopacked, trigger a
 
1061
        # full-pack via the other repo which will cause us to re-evaluate and
 
1062
        # decide we don't need to do anything
 
1063
        orig_execute = packs._execute_pack_operations
 
1064
        def _munged_execute_pack_ops(*args, **kwargs):
 
1065
            tree.branch.repository.pack()
 
1066
            return orig_execute(*args, **kwargs)
 
1067
        packs._execute_pack_operations = _munged_execute_pack_ops
 
1068
        packs._max_pack_count = lambda x: 1
 
1069
        packs.pack_distribution = lambda x: [10]
 
1070
        self.assertFalse(packs.autopack())
 
1071
        self.assertEqual(1, len(packs.names()))
 
1072
        self.assertEqual(tree.branch.repository._pack_collection.names(),
 
1073
                         packs.names())
 
1074
 
 
1075
 
 
1076
class TestPack(TestCaseWithTransport):
 
1077
    """Tests for the Pack object."""
 
1078
 
 
1079
    def assertCurrentlyEqual(self, left, right):
 
1080
        self.assertTrue(left == right)
 
1081
        self.assertTrue(right == left)
 
1082
        self.assertFalse(left != right)
 
1083
        self.assertFalse(right != left)
 
1084
 
 
1085
    def assertCurrentlyNotEqual(self, left, right):
 
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestGCCHKPackCollection(TestCaseWithTransport):
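    """Tests for fetch streaming with groupcompress (CHK) repositories."""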
 
    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='development6-rich-root')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='development6-rich-root')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='development6-rich-root')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)