/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_pack_repository.py

Merge with serialize-transform

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
# Copyright (C) 2008 Canonical Ltd
 
2
#
 
3
# This program is free software; you can redistribute it and/or modify
 
4
# it under the terms of the GNU General Public License as published by
 
5
# the Free Software Foundation; either version 2 of the License, or
 
6
# (at your option) any later version.
 
7
#
 
8
# This program is distributed in the hope that it will be useful,
 
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
11
# GNU General Public License for more details.
 
12
#
 
13
# You should have received a copy of the GNU General Public License
 
14
# along with this program; if not, write to the Free Software
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
16
 
 
17
"""Tests for pack repositories.
 
18
 
 
19
These tests are repeated for all pack-based repository formats.
 
20
"""
 
21
 
 
22
from cStringIO import StringIO
 
23
from stat import S_ISDIR
 
24
 
 
25
from bzrlib.btree_index import BTreeGraphIndex
 
26
from bzrlib.index import GraphIndex
 
27
from bzrlib import (
 
28
    bzrdir,
 
29
    errors,
 
30
    inventory,
 
31
    progress,
 
32
    repository,
 
33
    revision as _mod_revision,
 
34
    symbol_versioning,
 
35
    tests,
 
36
    ui,
 
37
    upgrade,
 
38
    workingtree,
 
39
    )
 
40
from bzrlib.tests import (
 
41
    TestCase,
 
42
    TestCaseWithTransport,
 
43
    TestNotApplicable,
 
44
    TestSkipped,
 
45
    )
 
46
from bzrlib.transport import (
 
47
    fakenfs,
 
48
    get_transport,
 
49
    )
 
50
 
 
51
 
 
52
class TestPackRepository(TestCaseWithTransport):
    """Tests to be repeated across all pack-based formats.

    The following are populated from the test scenario:

    :ivar format_name: Registered name of the format to test.
    :ivar format_string: On-disk format marker.
    :ivar format_supports_external_lookups: Boolean.
    """

    def get_format(self):
        """Return the BzrDirFormat for this scenario's registered name."""
        return bzrdir.format_registry.make_bzrdir(self.format_name)

    def test_attribute__fetch_order(self):
        """Packs do not need ordered data retrieval."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        self.assertEqual('unordered', repo._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Packs reuse deltas."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._fetch_uses_deltas)

    def test_disk_layout(self):
        """A freshly created repository has the expected on-disk layout."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        # lock/unlock once in case locking has side effects on the layout.
        repo.lock_write()
        repo.unlock()
        t = repo.bzrdir.get_repository_transport(None)
        self.check_format(t)
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.check_databases(t)

    def check_format(self, t):
        """Assert the on-disk format marker matches the scenario's string."""
        self.assertEqualDiff(
            self.format_string, # from scenario
            t.get('format').read())

    def assertHasNoKndx(self, t, knit_name):
        """Assert that knit_name has no index on t."""
        self.assertFalse(t.has(knit_name + '.kndx'))

    def assertHasNoKnit(self, t, knit_name):
        """Assert that knit_name has no knit data file on t."""
        # no default content
        self.assertFalse(t.has(knit_name + '.knit'))

    def check_databases(self, t):
        """check knit content for a repository."""
        # check conversion worked: no legacy knit/kndx files remain.
        self.assertHasNoKndx(t, 'inventory')
        self.assertHasNoKnit(t, 'inventory')
        self.assertHasNoKndx(t, 'revisions')
        self.assertHasNoKnit(t, 'revisions')
        self.assertHasNoKndx(t, 'signatures')
        self.assertHasNoKnit(t, 'signatures')
        self.assertFalse(t.has('knits'))
        # revision-indexes file-container directory starts empty.
        self.assertEqual([],
            list(self.index_class(t, 'pack-names', None).iter_all_entries()))
        self.assertTrue(S_ISDIR(t.stat('packs').st_mode))
        self.assertTrue(S_ISDIR(t.stat('upload').st_mode))
        self.assertTrue(S_ISDIR(t.stat('indices').st_mode))
        self.assertTrue(S_ISDIR(t.stat('obsolete_packs').st_mode))

    def test_shared_disk_layout(self):
        """A shared repository carries a 'shared-storage' marker file."""
        format = self.get_format()
        repo = self.make_repository('.', shared=True, format=format)
        # we want:
        t = repo.bzrdir.get_repository_transport(None)
        self.check_format(t)
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        # We should have a 'shared-storage' marker file.
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.check_databases(t)

    def test_shared_no_tree_disk_layout(self):
        """Toggling working trees adds/removes the 'no-working-trees' marker."""
        format = self.get_format()
        repo = self.make_repository('.', shared=True, format=format)
        repo.set_make_working_trees(False)
        # we want:
        t = repo.bzrdir.get_repository_transport(None)
        self.check_format(t)
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        # We should have a 'shared-storage' marker file.
        self.assertEqualDiff('', t.get('shared-storage').read())
        # We should have a marker for the no-working-trees flag.
        self.assertEqualDiff('', t.get('no-working-trees').read())
        # The marker should go when we toggle the setting.
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.check_databases(t)

    def test_adding_revision_creates_pack_indices(self):
        """Committing creates a pack whose index sizes are recorded."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        self.assertEqual([],
            list(self.index_class(trans, 'pack-names', None).iter_all_entries()))
        tree.commit('foobarbaz')
        index = self.index_class(trans, 'pack-names', None)
        index_nodes = list(index.iter_all_entries())
        self.assertEqual(1, len(index_nodes))
        node = index_nodes[0]
        name = node[1][0]
        # the pack sizes should be listed in the index
        pack_value = node[2]
        sizes = [int(digits) for digits in pack_value.split(' ')]
        # sizes are ordered: revision, inventory, text, signature indices.
        for size, suffix in zip(sizes, ['.rix', '.iix', '.tix', '.six']):
            stat = trans.stat('indices/%s%s' % (name, suffix))
            self.assertEqual(size, stat.st_size)

    def test_pulling_nothing_leads_to_no_new_names(self):
        """Fetching from an empty repository creates no packs."""
        format = self.get_format()
        tree1 = self.make_branch_and_tree('1', format=format)
        tree2 = self.make_branch_and_tree('2', format=format)
        tree1.branch.repository.fetch(tree2.branch.repository)
        trans = tree1.branch.repository.bzrdir.get_repository_transport(None)
        self.assertEqual([],
            list(self.index_class(trans, 'pack-names', None).iter_all_entries()))

    def test_commit_across_pack_shape_boundary_autopacks(self):
        """The tenth commit autopacks nine packs into one."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        # This test could be a little cheaper by replacing the packs
        # attribute on the repository to allow a different pack distribution
        # and max packs policy - so we are checking the policy is honoured
        # in the test. But for now 11 commits is not a big deal in a single
        # test.
        for x in range(9):
            tree.commit('commit %s' % x)
        # there should be 9 packs:
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(9, len(list(index.iter_all_entries())))
        # insert some files in obsolete_packs which should be removed by pack.
        trans.put_bytes('obsolete_packs/foo', '123')
        trans.put_bytes('obsolete_packs/bar', '321')
        # committing one more should coalesce to 1 of 10.
        tree.commit('commit triggering pack')
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(1, len(list(index.iter_all_entries())))
        # packing should not damage data
        tree = tree.bzrdir.open_workingtree()
        check_result = tree.branch.repository.check(
            [tree.branch.last_revision()])
        # We should have 50 (10x5) files in the obsolete_packs directory.
        obsolete_files = list(trans.list_dir('obsolete_packs'))
        self.assertFalse('foo' in obsolete_files)
        self.assertFalse('bar' in obsolete_files)
        self.assertEqual(50, len(obsolete_files))
        # XXX: Todo check packs obsoleted correctly - old packs and indices
        # in the obsolete_packs directory.
        large_pack_name = list(index.iter_all_entries())[0][1][0]
        # finally, committing again should not touch the large pack.
        tree.commit('commit not triggering pack')
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(2, len(list(index.iter_all_entries())))
        pack_names = [node[1][0] for node in index.iter_all_entries()]
        self.assertTrue(large_pack_name in pack_names)

    def test_fail_obsolete_deletion(self):
        # failing to delete obsolete packs is not fatal
        format = self.get_format()
        server = fakenfs.FakeNFSServer()
        server.setUp()
        self.addCleanup(server.tearDown)
        transport = get_transport(server.get_url())
        # NOTE: named a_bzrdir so we do not shadow the module-level
        # 'bzrdir' import.
        a_bzrdir = format.initialize_on_transport(transport)
        repo = a_bzrdir.create_repository()
        repo_transport = a_bzrdir.get_repository_transport(None)
        self.assertTrue(repo_transport.has('obsolete_packs'))
        # these files are in use by another client and typically can't be deleted
        repo_transport.put_bytes('obsolete_packs/.nfsblahblah', 'contents')
        repo._pack_collection._clear_obsolete_packs()
        self.assertTrue(repo_transport.has('obsolete_packs/.nfsblahblah'))

    def test_pack_after_two_commits_packs_everything(self):
        """An explicit pack() call coalesces all packs into one."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        tree.commit('start')
        tree.commit('more work')
        tree.branch.repository.pack()
        # there should be 1 pack:
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(1, len(list(index.iter_all_entries())))
        self.assertEqual(2, len(tree.branch.repository.all_revision_ids()))

    def test_pack_layout(self):
        """Revisions are written tip-first within a pack."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        tree.commit('start', rev_id='1')
        tree.commit('more work', rev_id='2')
        tree.branch.repository.pack()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        pack = tree.branch.repository._pack_collection.get_pack_by_name(
            tree.branch.repository._pack_collection.names()[0])
        # revision access tends to be tip->ancestor, so ordering that way on
        # disk is a good idea.
        for _1, key, val, refs in pack.revision_index.iter_all_entries():
            if key == ('1',):
                pos_1 = int(val[1:].split()[0])
            else:
                pos_2 = int(val[1:].split()[0])
        self.assertTrue(pos_2 < pos_1)

    def test_pack_repositories_support_multiple_write_locks(self):
        """Two clients may hold write locks on a pack repo concurrently."""
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        r1.lock_write()
        self.addCleanup(r1.unlock)
        r2.lock_write()
        r2.unlock()

    def _add_text(self, repo, fileid):
        """Add a text to the repository within a write group."""
        repo.texts.add_lines((fileid, 'samplerev+'+fileid), [], [])

    def test_concurrent_writers_merge_new_packs(self):
        """Two overlapping write groups both land; names lists merge."""
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        r1.lock_write()
        try:
            # access enough data to load the names list
            list(r1.all_revision_ids())
            r2.lock_write()
            try:
                # access enough data to load the names list
                list(r2.all_revision_ids())
                r1.start_write_group()
                try:
                    r2.start_write_group()
                    try:
                        self._add_text(r1, 'fileidr1')
                        self._add_text(r2, 'fileidr2')
                    except:
                        r2.abort_write_group()
                        raise
                except:
                    r1.abort_write_group()
                    raise
                # both r1 and r2 have open write groups with data in them
                # created while the other's write group was open.
                # Commit both which requires a merge to the pack-names.
                try:
                    r1.commit_write_group()
                except:
                    r1.abort_write_group()
                    r2.abort_write_group()
                    raise
                r2.commit_write_group()
                # tell r1 to reload from disk
                r1._pack_collection.reset()
                # Now both repositories should know about both names
                r1._pack_collection.ensure_loaded()
                r2._pack_collection.ensure_loaded()
                self.assertEqual(r1._pack_collection.names(),
                                 r2._pack_collection.names())
                self.assertEqual(2, len(r1._pack_collection.names()))
            finally:
                r2.unlock()
        finally:
            r1.unlock()

    def test_concurrent_writer_second_preserves_dropping_a_pack(self):
        """A concurrent commit does not reinstate a pack dropped by a peer."""
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        # add a pack to drop
        r1.lock_write()
        try:
            r1.start_write_group()
            try:
                self._add_text(r1, 'fileidr1')
            except:
                r1.abort_write_group()
                raise
            else:
                r1.commit_write_group()
            r1._pack_collection.ensure_loaded()
            name_to_drop = r1._pack_collection.all_packs()[0].name
        finally:
            r1.unlock()
        r1.lock_write()
        try:
            # access enough data to load the names list
            list(r1.all_revision_ids())
            r2.lock_write()
            try:
                # access enough data to load the names list
                list(r2.all_revision_ids())
                r1._pack_collection.ensure_loaded()
                try:
                    r2.start_write_group()
                    try:
                        # in r1, drop the pack
                        r1._pack_collection._remove_pack_from_memory(
                            r1._pack_collection.get_pack_by_name(name_to_drop))
                        # in r2, add a pack
                        self._add_text(r2, 'fileidr2')
                    except:
                        r2.abort_write_group()
                        raise
                except:
                    r1._pack_collection.reset()
                    raise
                # r1 has a changed names list, and r2 an open write groups with
                # changes.
                # save r1, and then commit the r2 write group, which requires a
                # merge to the pack-names, which should not reinstate
                # name_to_drop
                try:
                    r1._pack_collection._save_pack_names()
                    r1._pack_collection.reset()
                except:
                    r2.abort_write_group()
                    raise
                try:
                    r2.commit_write_group()
                except:
                    r2.abort_write_group()
                    raise
                # Now both repositories should know about just one name.
                r1._pack_collection.ensure_loaded()
                r2._pack_collection.ensure_loaded()
                self.assertEqual(r1._pack_collection.names(),
                                 r2._pack_collection.names())
                self.assertEqual(1, len(r1._pack_collection.names()))
                self.assertFalse(name_to_drop in r1._pack_collection.names())
            finally:
                r2.unlock()
        finally:
            r1.unlock()

    def test_lock_write_does_not_physically_lock(self):
        """Pack repositories take no physical lock on lock_write."""
        repo = self.make_repository('.', format=self.get_format())
        repo.lock_write()
        self.addCleanup(repo.unlock)
        self.assertFalse(repo.get_physical_lock_status())

    def prepare_for_break_lock(self):
        """Point the global ui factory at canned 'yes' input for break-lock.

        Restores the previous factory via addCleanup.
        """
        old_factory = ui.ui_factory
        def restoreFactory():
            ui.ui_factory = old_factory
        self.addCleanup(restoreFactory)
        ui.ui_factory = ui.SilentUIFactory()
        ui.ui_factory.stdin = StringIO("y\n")

    def test_break_lock_breaks_physical_lock(self):
        repo = self.make_repository('.', format=self.get_format())
        # take the names lock and leave the physical lock in place on unlock.
        repo._pack_collection.lock_names()
        repo.control_files.leave_in_place()
        repo.unlock()
        repo2 = repository.Repository.open('.')
        self.assertTrue(repo.get_physical_lock_status())
        self.prepare_for_break_lock()
        repo2.break_lock()
        self.assertFalse(repo.get_physical_lock_status())

    def test_broken_physical_locks_error_on__unlock_names_lock(self):
        repo = self.make_repository('.', format=self.get_format())
        repo._pack_collection.lock_names()
        self.assertTrue(repo.get_physical_lock_status())
        repo2 = repository.Repository.open('.')
        self.prepare_for_break_lock()
        repo2.break_lock()
        # unlocking the stolen names lock must raise, not silently succeed.
        self.assertRaises(errors.LockBroken,
            repo._pack_collection._unlock_names)

    def test_fetch_without_find_ghosts_ignores_ghosts(self):
        # we want two repositories at this point:
        # one with a revision that is a ghost in the other
        # repository.
        # 'ghost' is present in has_ghost, 'ghost' is absent in 'missing_ghost'.
        # 'references' is present in both repositories, and 'tip' is present
        # just in has_ghost.
        # has_ghost       missing_ghost
        #------------------------------
        # 'ghost'             -
        # 'references'    'references'
        # 'tip'               -
        # In this test we fetch 'tip' which should not fetch 'ghost'
        has_ghost = self.make_repository('has_ghost', format=self.get_format())
        missing_ghost = self.make_repository('missing_ghost',
            format=self.get_format())

        def add_commit(repo, revision_id, parent_ids):
            # Build a minimal revision (empty inventory, root text only)
            # directly through the repository API.
            repo.lock_write()
            repo.start_write_group()
            inv = inventory.Inventory(revision_id=revision_id)
            inv.root.revision = revision_id
            root_id = inv.root.file_id
            sha1 = repo.add_inventory(revision_id, inv, [])
            repo.texts.add_lines((root_id, revision_id), [], [])
            rev = _mod_revision.Revision(timestamp=0,
                                         timezone=None,
                                         committer="Foo Bar <foo@example.com>",
                                         message="Message",
                                         inventory_sha1=sha1,
                                         revision_id=revision_id)
            rev.parent_ids = parent_ids
            repo.add_revision(revision_id, rev)
            repo.commit_write_group()
            repo.unlock()
        add_commit(has_ghost, 'ghost', [])
        add_commit(has_ghost, 'references', ['ghost'])
        add_commit(missing_ghost, 'references', ['ghost'])
        add_commit(has_ghost, 'tip', ['references'])
        missing_ghost.fetch(has_ghost, 'tip')
        # missing ghost now has tip and not ghost.
        rev = missing_ghost.get_revision('tip')
        inv = missing_ghost.get_inventory('tip')
        self.assertRaises(errors.NoSuchRevision,
            missing_ghost.get_revision, 'ghost')
        self.assertRaises(errors.NoSuchRevision,
            missing_ghost.get_inventory, 'ghost')

    def test_supports_external_lookups(self):
        repo = self.make_repository('.', format=self.get_format())
        self.assertEqual(self.format_supports_external_lookups,
            repo._format.supports_external_lookups)
 
486
 
 
487
 
 
488
class TestPackRepositoryStacking(TestCaseWithTransport):
    """Tests for stacking pack repositories"""

    def setUp(self):
        # Skip the whole class for scenarios whose format cannot stack.
        if not self.format_supports_external_lookups:
            raise TestNotApplicable("%r doesn't support stacking"
                % (self.format_name,))
        super(TestPackRepositoryStacking, self).setUp()

    def get_format(self):
        """Return the BzrDirFormat for this scenario's registered name."""
        return bzrdir.format_registry.make_bzrdir(self.format_name)

    def test_stack_checks_rich_root_compatibility(self):
        # early versions of the packing code relied on pack internals to
        # stack, but the current version should be able to stack on any
        # format.
        #
        # TODO: Possibly this should be run per-repository-format and raise
        # TestNotApplicable on formats that don't support stacking. -- mbp
        # 20080729
        repo = self.make_repository('repo', format=self.get_format())
        if repo.supports_rich_root():
            # can only stack on repositories that have compatible internal
            # metadata
            if getattr(repo._format, 'supports_tree_reference', False):
                matching_format_name = 'pack-0.92-subtree'
            else:
                matching_format_name = 'rich-root-pack'
            mismatching_format_name = 'pack-0.92'
        else:
            matching_format_name = 'pack-0.92'
            mismatching_format_name = 'pack-0.92-subtree'
        base = self.make_repository('base', format=matching_format_name)
        repo.add_fallback_repository(base)
        # you can't stack on something with incompatible data
        bad_repo = self.make_repository('mismatch',
            format=mismatching_format_name)
        e = self.assertRaises(errors.IncompatibleRepositories,
            repo.add_fallback_repository, bad_repo)
        self.assertContainsRe(str(e),
            r'(?m)KnitPackRepository.*/mismatch/.*\nis not compatible with\n'
            r'KnitPackRepository.*/repo/.*\n'
            r'different rich-root support')

    def test_stack_checks_serializers_compatibility(self):
        repo = self.make_repository('repo', format=self.get_format())
        if getattr(repo._format, 'supports_tree_reference', False):
            # can only stack on repositories that have compatible internal
            # metadata
            matching_format_name = 'pack-0.92-subtree'
            mismatching_format_name = 'rich-root-pack'
        else:
            if repo.supports_rich_root():
                matching_format_name = 'rich-root-pack'
                mismatching_format_name = 'pack-0.92-subtree'
            else:
                raise TestNotApplicable('No formats use non-v5 serializer'
                    ' without having rich-root also set')
        base = self.make_repository('base', format=matching_format_name)
        repo.add_fallback_repository(base)
        # you can't stack on something with incompatible data
        bad_repo = self.make_repository('mismatch',
            format=mismatching_format_name)
        e = self.assertRaises(errors.IncompatibleRepositories,
            repo.add_fallback_repository, bad_repo)
        self.assertContainsRe(str(e),
            r'(?m)KnitPackRepository.*/mismatch/.*\nis not compatible with\n'
            r'KnitPackRepository.*/repo/.*\n'
            r'different serializers')

    def test_adding_pack_does_not_record_pack_names_from_other_repositories(self):
        base = self.make_branch_and_tree('base', format=self.get_format())
        base.commit('foo')
        referencing = self.make_branch_and_tree('repo', format=self.get_format())
        referencing.branch.repository.add_fallback_repository(
            base.branch.repository)
        referencing.commit('bar')
        # re-open so we read the names list fresh from disk.
        new_instance = referencing.bzrdir.open_repository()
        new_instance.lock_read()
        self.addCleanup(new_instance.unlock)
        new_instance._pack_collection.ensure_loaded()
        # only the pack from our own commit, not the fallback's pack.
        self.assertEqual(1, len(new_instance._pack_collection.all_packs()))

    def test_autopack_only_considers_main_repo_packs(self):
        base = self.make_branch_and_tree('base', format=self.get_format())
        base.commit('foo')
        tree = self.make_branch_and_tree('repo', format=self.get_format())
        tree.branch.repository.add_fallback_repository(base.branch.repository)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        # This test could be a little cheaper by replacing the packs
        # attribute on the repository to allow a different pack distribution
        # and max packs policy - so we are checking the policy is honoured
        # in the test. But for now 11 commits is not a big deal in a single
        # test.
        for x in range(9):
            tree.commit('commit %s' % x)
        # there should be 9 packs:
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(9, len(list(index.iter_all_entries())))
        # committing one more should coalesce to 1 of 10.
        tree.commit('commit triggering pack')
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(1, len(list(index.iter_all_entries())))
        # packing should not damage data
        tree = tree.bzrdir.open_workingtree()
        check_result = tree.branch.repository.check(
            [tree.branch.last_revision()])
        # We should have 50 (10x5) files in the obsolete_packs directory.
        # NOTE(review): the 'foo'/'bar' assertions below look copied from
        # test_commit_across_pack_shape_boundary_autopacks, which actually
        # creates those files; here they pass trivially.
        obsolete_files = list(trans.list_dir('obsolete_packs'))
        self.assertFalse('foo' in obsolete_files)
        self.assertFalse('bar' in obsolete_files)
        self.assertEqual(50, len(obsolete_files))
        # XXX: Todo check packs obsoleted correctly - old packs and indices
        # in the obsolete_packs directory.
        large_pack_name = list(index.iter_all_entries())[0][1][0]
        # finally, committing again should not touch the large pack.
        tree.commit('commit not triggering pack')
        index = self.index_class(trans, 'pack-names', None)
        self.assertEqual(2, len(list(index.iter_all_entries())))
        pack_names = [node[1][0] for node in index.iter_all_entries()]
        self.assertTrue(large_pack_name in pack_names)
 
609
 
 
610
 
 
611
def load_tests(basic_tests, module, test_loader):
    """Multiply every test in this module across all pack format scenarios.

    Each scenario supplies the bzrdir canned format name, the repository
    on-disk format string, whether external (stacking) lookups are
    supported, and the pack-names index class to use.
    """
    scenarios_params = [
         dict(format_name='pack-0.92',
              format_string="Bazaar pack repository format 1 (needs bzr 0.92)\n",
              format_supports_external_lookups=False,
              index_class=GraphIndex),
         dict(format_name='pack-0.92-subtree',
              format_string="Bazaar pack repository format 1 "
              "with subtree support (needs bzr 0.92)\n",
              format_supports_external_lookups=False,
              index_class=GraphIndex),
         dict(format_name='1.6',
              format_string="Bazaar RepositoryFormatKnitPack5 (bzr 1.6)\n",
              format_supports_external_lookups=True,
              index_class=GraphIndex),
         dict(format_name='1.6.1-rich-root',
              format_string="Bazaar RepositoryFormatKnitPack5RichRoot "
                  "(bzr 1.6.1)\n",
              format_supports_external_lookups=True,
              index_class=GraphIndex),
         dict(format_name='development2',
              format_string="Bazaar development format 2 "
                  "(needs bzr.dev from before 1.8)\n",
              format_supports_external_lookups=True,
              index_class=BTreeGraphIndex),
         dict(format_name='development2-subtree',
              format_string="Bazaar development format 2 "
                  "with subtree support (needs bzr.dev from before 1.8)\n",
              format_supports_external_lookups=True,
              index_class=BTreeGraphIndex),
         ]
    adapter = tests.TestScenarioApplier()
    # name of the scenario is the format name
    adapter.scenarios = [(s['format_name'], s) for s in scenarios_params]
    suite = tests.TestSuite()
    tests.adapt_tests(basic_tests, adapter, suite)
    return suite