/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Robert Collins
  • Date: 2007-04-19 02:27:44 UTC
  • mto: This revision was merged to the branch mainline in revision 2426.
  • Revision ID: robertc@robertcollins.net-20070419022744-pfdqz42kp1wizh43
``make docs`` now creates a man page at ``man1/bzr.1`` fixing bug 107388.
(Robert Collins)

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2006, 2007 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/per_repository/*.py.
 
19
For interface tests see tests/repository_implementations/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
 
26
from StringIO import StringIO
26
27
 
 
28
from bzrlib import symbol_versioning
27
29
import bzrlib
28
 
from bzrlib.errors import (
29
 
    UnknownFormatError,
30
 
    UnsupportedFormatError,
31
 
    )
32
 
from bzrlib import (
33
 
    btree_index,
34
 
    graph,
35
 
    symbol_versioning,
36
 
    tests,
37
 
    transport,
38
 
    )
39
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex
 
30
import bzrlib.bzrdir as bzrdir
 
31
import bzrlib.errors as errors
 
32
from bzrlib.errors import (NotBranchError,
 
33
                           NoSuchFile,
 
34
                           UnknownFormatError,
 
35
                           UnsupportedFormatError,
 
36
                           )
41
37
from bzrlib.repository import RepositoryFormat
42
 
from bzrlib.tests import (
43
 
    TestCase,
44
 
    TestCaseWithTransport,
45
 
    )
 
38
from bzrlib.tests import TestCase, TestCaseWithTransport
 
39
from bzrlib.transport import get_transport
 
40
from bzrlib.transport.memory import MemoryServer
46
41
from bzrlib import (
47
 
    bzrdir,
48
 
    errors,
49
 
    inventory,
50
 
    osutils,
51
42
    repository,
52
 
    revision as _mod_revision,
53
43
    upgrade,
54
 
    versionedfile,
55
44
    workingtree,
56
45
    )
57
 
from bzrlib.repofmt import (
58
 
    groupcompress_repo,
59
 
    knitrepo,
60
 
    knitpack_repo,
61
 
    pack_repo,
62
 
    )
 
46
from bzrlib.repofmt import knitrepo, weaverepo
63
47
 
64
48
 
65
49
class TestDefaultFormat(TestCase):
67
51
    def test_get_set_default_format(self):
68
52
        old_default = bzrdir.format_registry.get('default')
69
53
        private_default = old_default().repository_format.__class__
70
 
        old_format = repository.format_registry.get_default()
 
54
        old_format = repository.RepositoryFormat.get_default_format()
71
55
        self.assertTrue(isinstance(old_format, private_default))
72
56
        def make_sample_bzrdir():
73
57
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
85
69
            self.assertEqual(result, 'A bzr repository dir')
86
70
        finally:
87
71
            bzrdir.format_registry.remove('default')
88
 
            bzrdir.format_registry.remove('sample')
89
72
            bzrdir.format_registry.register('default', old_default, '')
90
 
        self.assertIsInstance(repository.format_registry.get_default(),
 
73
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
91
74
                              old_format.__class__)
92
75
 
93
76
 
94
77
class SampleRepositoryFormat(repository.RepositoryFormat):
95
78
    """A sample format
96
79
 
97
 
    this format is initializable, unsupported to aid in testing the
 
80
    this format is initializable, unsupported to aid in testing the 
98
81
    open and open(unsupported=True) routines.
99
82
    """
100
83
 
115
98
        return "opened repository."
116
99
 
117
100
 
118
 
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
119
 
    """A sample format that can not be used in a metadir
120
 
 
121
 
    """
122
 
 
123
 
    def get_format_string(self):
124
 
        raise NotImplementedError
125
 
 
126
 
 
127
101
class TestRepositoryFormat(TestCaseWithTransport):
128
102
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
129
103
 
130
104
    def test_find_format(self):
131
105
        # is the right format object found for a repository?
132
106
        # create a branch with a few known format objects.
133
 
        # this is not quite the same as
 
107
        # this is not quite the same as 
134
108
        self.build_tree(["foo/", "bar/"])
135
109
        def check_format(format, url):
136
110
            dir = format._matchingbzrdir.initialize(url)
137
111
            format.initialize(dir)
138
 
            t = transport.get_transport(url)
 
112
            t = get_transport(url)
139
113
            found_format = repository.RepositoryFormat.find_format(dir)
140
 
            self.assertIsInstance(found_format, format.__class__)
141
 
        check_format(repository.format_registry.get_default(), "bar")
142
 
 
 
114
            self.failUnless(isinstance(found_format, format.__class__))
 
115
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
116
        
143
117
    def test_find_format_no_repository(self):
144
118
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
145
119
        self.assertRaises(errors.NoRepositoryPresent,
154
128
                          dir)
155
129
 
156
130
    def test_register_unregister_format(self):
157
 
        # Test deprecated format registration functions
158
131
        format = SampleRepositoryFormat()
159
132
        # make a control dir
160
133
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
161
134
        # make a repo
162
135
        format.initialize(dir)
163
136
        # register a format for it.
164
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
165
 
            repository.RepositoryFormat.register_format, format)
 
137
        repository.RepositoryFormat.register_format(format)
166
138
        # which repository.Open will refuse (not supported)
167
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
168
 
            self.get_url())
 
139
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
169
140
        # but open(unsupported) will work
170
141
        self.assertEqual(format.open(dir), "opened repository.")
171
142
        # unregister the format
172
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
173
 
            repository.RepositoryFormat.unregister_format, format)
174
 
 
175
 
 
176
 
class TestRepositoryFormatRegistry(TestCase):
177
 
 
178
 
    def setUp(self):
179
 
        super(TestRepositoryFormatRegistry, self).setUp()
180
 
        self.registry = repository.RepositoryFormatRegistry()
181
 
 
182
 
    def test_register_unregister_format(self):
183
 
        format = SampleRepositoryFormat()
184
 
        self.registry.register(format)
185
 
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
186
 
        self.registry.remove(format)
187
 
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
188
 
 
189
 
    def test_get_all(self):
190
 
        format = SampleRepositoryFormat()
191
 
        self.assertEquals([], self.registry._get_all())
192
 
        self.registry.register(format)
193
 
        self.assertEquals([format], self.registry._get_all())
194
 
 
195
 
    def test_register_extra(self):
196
 
        format = SampleExtraRepositoryFormat()
197
 
        self.assertEquals([], self.registry._get_all())
198
 
        self.registry.register_extra(format)
199
 
        self.assertEquals([format], self.registry._get_all())
200
 
 
201
 
    def test_register_extra_lazy(self):
202
 
        self.assertEquals([], self.registry._get_all())
203
 
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
204
 
            "SampleExtraRepositoryFormat")
205
 
        formats = self.registry._get_all()
206
 
        self.assertEquals(1, len(formats))
207
 
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
 
143
        repository.RepositoryFormat.unregister_format(format)
 
144
 
 
145
 
 
146
class TestFormat6(TestCaseWithTransport):
 
147
 
 
148
    def test_no_ancestry_weave(self):
 
149
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
150
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
151
        # We no longer need to create the ancestry.weave file
 
152
        # since it is *never* used.
 
153
        self.assertRaises(NoSuchFile,
 
154
                          control.transport.get,
 
155
                          'ancestry.weave')
 
156
 
 
157
 
 
158
class TestFormat7(TestCaseWithTransport):
 
159
    
 
160
    def test_disk_layout(self):
 
161
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
162
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
163
        # in case of side effects of locking.
 
164
        repo.lock_write()
 
165
        repo.unlock()
 
166
        # we want:
 
167
        # format 'Bazaar-NG Repository format 7'
 
168
        # lock ''
 
169
        # inventory.weave == empty_weave
 
170
        # empty revision-store directory
 
171
        # empty weaves directory
 
172
        t = control.get_repository_transport(None)
 
173
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
174
                             t.get('format').read())
 
175
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
176
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
177
        self.assertEqualDiff('# bzr weave file v5\n'
 
178
                             'w\n'
 
179
                             'W\n',
 
180
                             t.get('inventory.weave').read())
 
181
 
 
182
    def test_shared_disk_layout(self):
 
183
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
184
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
185
        # we want:
 
186
        # format 'Bazaar-NG Repository format 7'
 
187
        # inventory.weave == empty_weave
 
188
        # empty revision-store directory
 
189
        # empty weaves directory
 
190
        # a 'shared-storage' marker file.
 
191
        # lock is not present when unlocked
 
192
        t = control.get_repository_transport(None)
 
193
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
194
                             t.get('format').read())
 
195
        self.assertEqualDiff('', t.get('shared-storage').read())
 
196
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
197
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
198
        self.assertEqualDiff('# bzr weave file v5\n'
 
199
                             'w\n'
 
200
                             'W\n',
 
201
                             t.get('inventory.weave').read())
 
202
        self.assertFalse(t.has('branch-lock'))
 
203
 
 
204
    def test_creates_lockdir(self):
 
205
        """Make sure it appears to be controlled by a LockDir existence"""
 
206
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
207
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
208
        t = control.get_repository_transport(None)
 
209
        # TODO: Should check there is a 'lock' toplevel directory, 
 
210
        # regardless of contents
 
211
        self.assertFalse(t.has('lock/held/info'))
 
212
        repo.lock_write()
 
213
        try:
 
214
            self.assertTrue(t.has('lock/held/info'))
 
215
        finally:
 
216
            # unlock so we don't get a warning about failing to do so
 
217
            repo.unlock()
 
218
 
 
219
    def test_uses_lockdir(self):
 
220
        """repo format 7 actually locks on lockdir"""
 
221
        base_url = self.get_url()
 
222
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
 
223
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
224
        t = control.get_repository_transport(None)
 
225
        repo.lock_write()
 
226
        repo.unlock()
 
227
        del repo
 
228
        # make sure the same lock is created by opening it
 
229
        repo = repository.Repository.open(base_url)
 
230
        repo.lock_write()
 
231
        self.assertTrue(t.has('lock/held/info'))
 
232
        repo.unlock()
 
233
        self.assertFalse(t.has('lock/held/info'))
 
234
 
 
235
    def test_shared_no_tree_disk_layout(self):
 
236
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
237
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
238
        repo.set_make_working_trees(False)
 
239
        # we want:
 
240
        # format 'Bazaar-NG Repository format 7'
 
241
        # lock ''
 
242
        # inventory.weave == empty_weave
 
243
        # empty revision-store directory
 
244
        # empty weaves directory
 
245
        # a 'shared-storage' marker file.
 
246
        t = control.get_repository_transport(None)
 
247
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
248
                             t.get('format').read())
 
249
        ## self.assertEqualDiff('', t.get('lock').read())
 
250
        self.assertEqualDiff('', t.get('shared-storage').read())
 
251
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
252
        repo.set_make_working_trees(True)
 
253
        self.assertFalse(t.has('no-working-trees'))
 
254
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
255
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
256
        self.assertEqualDiff('# bzr weave file v5\n'
 
257
                             'w\n'
 
258
                             'W\n',
 
259
                             t.get('inventory.weave').read())
208
260
 
209
261
 
210
262
class TestFormatKnit1(TestCaseWithTransport):
211
 
 
212
 
    def test_attribute__fetch_order(self):
213
 
        """Knits need topological data insertion."""
214
 
        repo = self.make_repository('.',
215
 
                format=bzrdir.format_registry.get('knit')())
216
 
        self.assertEqual('topological', repo._format._fetch_order)
217
 
 
218
 
    def test_attribute__fetch_uses_deltas(self):
219
 
        """Knits reuse deltas."""
220
 
        repo = self.make_repository('.',
221
 
                format=bzrdir.format_registry.get('knit')())
222
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
223
 
 
 
263
    
224
264
    def test_disk_layout(self):
225
265
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
226
266
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
240
280
        # self.assertEqualDiff('', t.get('lock').read())
241
281
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
242
282
        self.check_knits(t)
243
 
        # Check per-file knits.
244
 
        branch = control.create_branch()
245
 
        tree = control.create_workingtree()
246
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
247
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
248
 
        tree.commit('1st post', rev_id='foo')
249
 
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
250
 
            '\nfoo fulltext 0 81  :')
251
283
 
252
 
    def assertHasKnit(self, t, knit_name, extra_content=''):
 
284
    def assertHasKnit(self, t, knit_name):
253
285
        """Assert that knit_name exists on t."""
254
 
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
 
286
        self.assertEqualDiff('# bzr knit index 8\n',
255
287
                             t.get(knit_name + '.kndx').read())
 
288
        # no default content
 
289
        self.assertTrue(t.has(knit_name + '.knit'))
256
290
 
257
291
    def check_knits(self, t):
258
292
        """check knit content for a repository."""
302
336
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
303
337
        self.check_knits(t)
304
338
 
305
 
    def test_deserialise_sets_root_revision(self):
306
 
        """We must have a inventory.root.revision
307
 
 
308
 
        Old versions of the XML5 serializer did not set the revision_id for
309
 
        the whole inventory. So we grab the one from the expected text. Which
310
 
        is valid when the api is not being abused.
311
 
        """
312
 
        repo = self.make_repository('.',
313
 
                format=bzrdir.format_registry.get('knit')())
314
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
315
 
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
316
 
        self.assertEqual('test-rev-id', inv.root.revision)
317
 
 
318
 
    def test_deserialise_uses_global_revision_id(self):
319
 
        """If it is set, then we re-use the global revision id"""
320
 
        repo = self.make_repository('.',
321
 
                format=bzrdir.format_registry.get('knit')())
322
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
323
 
                   '</inventory>\n')
324
 
        # Arguably, the deserialise_inventory should detect a mismatch, and
325
 
        # raise an error, rather than silently using one revision_id over the
326
 
        # other.
327
 
        self.assertRaises(AssertionError, repo._deserialise_inventory,
328
 
            'test-rev-id', inv_xml)
329
 
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
330
 
        self.assertEqual('other-rev-id', inv.root.revision)
331
 
 
332
 
    def test_supports_external_lookups(self):
333
 
        repo = self.make_repository('.',
334
 
                format=bzrdir.format_registry.get('knit')())
335
 
        self.assertFalse(repo._format.supports_external_lookups)
336
 
 
337
339
 
338
340
class DummyRepository(object):
339
341
    """A dummy repository for testing."""
340
342
 
341
 
    _format = None
342
343
    _serializer = None
343
344
 
344
345
    def supports_rich_root(self):
345
 
        if self._format is not None:
346
 
            return self._format.rich_root_data
347
346
        return False
348
347
 
349
 
    def get_graph(self):
350
 
        raise NotImplementedError
351
 
 
352
 
    def get_parent_map(self, revision_ids):
353
 
        raise NotImplementedError
354
 
 
355
348
 
356
349
class InterDummy(repository.InterRepository):
357
350
    """An inter-repository optimised code path for DummyRepository.
358
351
 
359
352
    This is for use during testing where we use DummyRepository as repositories
360
353
    so that none of the default regsitered inter-repository classes will
361
 
    MATCH.
 
354
    match.
362
355
    """
363
356
 
364
357
    @staticmethod
365
358
    def is_compatible(repo_source, repo_target):
366
359
        """InterDummy is compatible with DummyRepository."""
367
 
        return (isinstance(repo_source, DummyRepository) and
 
360
        return (isinstance(repo_source, DummyRepository) and 
368
361
            isinstance(repo_target, DummyRepository))
369
362
 
370
363
 
383
376
 
384
377
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
385
378
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
386
 
 
 
379
        
387
380
        The effective default is now InterSameDataRepository because there is
388
381
        no actual sane default in the presence of incompatible data models.
389
382
        """
400
393
        # pair that it returns true on for the is_compatible static method
401
394
        # check
402
395
        dummy_a = DummyRepository()
403
 
        dummy_a._format = RepositoryFormat()
404
396
        dummy_b = DummyRepository()
405
 
        dummy_b._format = RepositoryFormat()
406
397
        repo = self.make_repository('.')
407
398
        # hack dummies to look like repo somewhat.
408
399
        dummy_a._serializer = repo._serializer
409
 
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
410
 
        dummy_a._format.rich_root_data = repo._format.rich_root_data
411
400
        dummy_b._serializer = repo._serializer
412
 
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
413
 
        dummy_b._format.rich_root_data = repo._format.rich_root_data
414
401
        repository.InterRepository.register_optimiser(InterDummy)
415
402
        try:
416
403
            # we should get the default for something InterDummy returns False
429
416
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
430
417
 
431
418
 
432
 
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
433
 
 
434
 
    def get_format_string(self):
435
 
        return "Test Format 1"
436
 
 
437
 
 
438
 
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
439
 
 
440
 
    def get_format_string(self):
441
 
        return "Test Format 2"
 
419
class TestInterWeaveRepo(TestCaseWithTransport):
 
420
 
 
421
    def test_is_compatible_and_registered(self):
 
422
        # InterWeaveRepo is compatible when either side
 
423
        # is a format 5/6/7 branch
 
424
        from bzrlib.repofmt import knitrepo, weaverepo
 
425
        formats = [weaverepo.RepositoryFormat5(),
 
426
                   weaverepo.RepositoryFormat6(),
 
427
                   weaverepo.RepositoryFormat7()]
 
428
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
429
                                knitrepo.RepositoryFormatKnit1(),
 
430
                                ]
 
431
        repo_a = self.make_repository('a')
 
432
        repo_b = self.make_repository('b')
 
433
        is_compatible = repository.InterWeaveRepo.is_compatible
 
434
        for source in incompatible_formats:
 
435
            # force incompatible left then right
 
436
            repo_a._format = source
 
437
            repo_b._format = formats[0]
 
438
            self.assertFalse(is_compatible(repo_a, repo_b))
 
439
            self.assertFalse(is_compatible(repo_b, repo_a))
 
440
        for source in formats:
 
441
            repo_a._format = source
 
442
            for target in formats:
 
443
                repo_b._format = target
 
444
                self.assertTrue(is_compatible(repo_a, repo_b))
 
445
        self.assertEqual(repository.InterWeaveRepo,
 
446
                         repository.InterRepository.get(repo_a,
 
447
                                                        repo_b).__class__)
442
448
 
443
449
 
444
450
class TestRepositoryConverter(TestCaseWithTransport):
445
451
 
446
452
    def test_convert_empty(self):
447
 
        source_format = TestRepositoryFormat1()
448
 
        target_format = TestRepositoryFormat2()
449
 
        repository.format_registry.register(source_format)
450
 
        self.addCleanup(repository.format_registry.remove,
451
 
            source_format)
452
 
        repository.format_registry.register(target_format)
453
 
        self.addCleanup(repository.format_registry.remove,
454
 
            target_format)
455
 
        t = self.get_transport()
 
453
        t = get_transport(self.get_url('.'))
456
454
        t.mkdir('repository')
457
455
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
458
 
        repo = TestRepositoryFormat1().initialize(repo_dir)
 
456
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
457
        target_format = knitrepo.RepositoryFormatKnit1()
459
458
        converter = repository.CopyConverter(target_format)
460
459
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
461
460
        try:
466
465
        self.assertTrue(isinstance(target_format, repo._format.__class__))
467
466
 
468
467
 
 
468
class TestMisc(TestCase):
 
469
    
 
470
    def test_unescape_xml(self):
 
471
        """We get some kind of error when malformed entities are passed"""
 
472
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
 
473
 
 
474
 
469
475
class TestRepositoryFormatKnit3(TestCaseWithTransport):
470
476
 
471
 
    def test_attribute__fetch_order(self):
472
 
        """Knits need topological data insertion."""
473
 
        format = bzrdir.BzrDirMetaFormat1()
474
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
475
 
        repo = self.make_repository('.', format=format)
476
 
        self.assertEqual('topological', repo._format._fetch_order)
477
 
 
478
 
    def test_attribute__fetch_uses_deltas(self):
479
 
        """Knits reuse deltas."""
480
 
        format = bzrdir.BzrDirMetaFormat1()
481
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
482
 
        repo = self.make_repository('.', format=format)
483
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
484
 
 
485
477
    def test_convert(self):
486
478
        """Ensure the upgrade adds weaves for roots"""
487
479
        format = bzrdir.BzrDirMetaFormat1()
489
481
        tree = self.make_branch_and_tree('.', format)
490
482
        tree.commit("Dull commit", rev_id="dull")
491
483
        revision_tree = tree.branch.repository.revision_tree('dull')
492
 
        revision_tree.lock_read()
493
 
        try:
494
 
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
495
 
                revision_tree.inventory.root.file_id)
496
 
        finally:
497
 
            revision_tree.unlock()
 
484
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
485
            revision_tree.inventory.root.file_id)
498
486
        format = bzrdir.BzrDirMetaFormat1()
499
487
        format.repository_format = knitrepo.RepositoryFormatKnit3()
500
488
        upgrade.Convert('.', format)
501
489
        tree = workingtree.WorkingTree.open('.')
502
490
        revision_tree = tree.branch.repository.revision_tree('dull')
503
 
        revision_tree.lock_read()
504
 
        try:
505
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
506
 
        finally:
507
 
            revision_tree.unlock()
 
491
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
508
492
        tree.commit("Another dull commit", rev_id='dull2')
509
493
        revision_tree = tree.branch.repository.revision_tree('dull2')
510
 
        revision_tree.lock_read()
511
 
        self.addCleanup(revision_tree.unlock)
512
494
        self.assertEqual('dull', revision_tree.inventory.root.revision)
513
495
 
514
 
    def test_supports_external_lookups(self):
515
 
        format = bzrdir.BzrDirMetaFormat1()
516
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
517
 
        repo = self.make_repository('.', format=format)
518
 
        self.assertFalse(repo._format.supports_external_lookups)
519
 
 
520
 
 
521
 
class Test2a(tests.TestCaseWithMemoryTransport):
522
 
 
523
 
    def test_chk_bytes_uses_custom_btree_parser(self):
524
 
        mt = self.make_branch_and_memory_tree('test', format='2a')
525
 
        mt.lock_write()
526
 
        self.addCleanup(mt.unlock)
527
 
        mt.add([''], ['root-id'])
528
 
        mt.commit('first')
529
 
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
530
 
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
531
 
        # It should also work if we re-open the repo
532
 
        repo = mt.branch.repository.bzrdir.open_repository()
533
 
        repo.lock_read()
534
 
        self.addCleanup(repo.unlock)
535
 
        index = repo.chk_bytes._index._graph_index._indices[0]
536
 
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
537
 
 
538
 
    def test_fetch_combines_groups(self):
539
 
        builder = self.make_branch_builder('source', format='2a')
540
 
        builder.start_series()
541
 
        builder.build_snapshot('1', None, [
542
 
            ('add', ('', 'root-id', 'directory', '')),
543
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
544
 
        builder.build_snapshot('2', ['1'], [
545
 
            ('modify', ('file-id', 'content-2\n'))])
546
 
        builder.finish_series()
547
 
        source = builder.get_branch()
548
 
        target = self.make_repository('target', format='2a')
549
 
        target.fetch(source.repository)
550
 
        target.lock_read()
551
 
        self.addCleanup(target.unlock)
552
 
        details = target.texts._index.get_build_details(
553
 
            [('file-id', '1',), ('file-id', '2',)])
554
 
        file_1_details = details[('file-id', '1')]
555
 
        file_2_details = details[('file-id', '2')]
556
 
        # The index, and what to read off disk, should be the same for both
557
 
        # versions of the file.
558
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
559
 
 
560
 
    def test_fetch_combines_groups(self):
561
 
        builder = self.make_branch_builder('source', format='2a')
562
 
        builder.start_series()
563
 
        builder.build_snapshot('1', None, [
564
 
            ('add', ('', 'root-id', 'directory', '')),
565
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
566
 
        builder.build_snapshot('2', ['1'], [
567
 
            ('modify', ('file-id', 'content-2\n'))])
568
 
        builder.finish_series()
569
 
        source = builder.get_branch()
570
 
        target = self.make_repository('target', format='2a')
571
 
        target.fetch(source.repository)
572
 
        target.lock_read()
573
 
        self.addCleanup(target.unlock)
574
 
        details = target.texts._index.get_build_details(
575
 
            [('file-id', '1',), ('file-id', '2',)])
576
 
        file_1_details = details[('file-id', '1')]
577
 
        file_2_details = details[('file-id', '2')]
578
 
        # The index, and what to read off disk, should be the same for both
579
 
        # versions of the file.
580
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
581
 
 
582
 
    def test_fetch_combines_groups(self):
583
 
        builder = self.make_branch_builder('source', format='2a')
584
 
        builder.start_series()
585
 
        builder.build_snapshot('1', None, [
586
 
            ('add', ('', 'root-id', 'directory', '')),
587
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
588
 
        builder.build_snapshot('2', ['1'], [
589
 
            ('modify', ('file-id', 'content-2\n'))])
590
 
        builder.finish_series()
591
 
        source = builder.get_branch()
592
 
        target = self.make_repository('target', format='2a')
593
 
        target.fetch(source.repository)
594
 
        target.lock_read()
595
 
        self.addCleanup(target.unlock)
596
 
        details = target.texts._index.get_build_details(
597
 
            [('file-id', '1',), ('file-id', '2',)])
598
 
        file_1_details = details[('file-id', '1')]
599
 
        file_2_details = details[('file-id', '2')]
600
 
        # The index, and what to read off disk, should be the same for both
601
 
        # versions of the file.
602
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
603
 
 
604
 
    def test_format_pack_compresses_True(self):
605
 
        repo = self.make_repository('repo', format='2a')
606
 
        self.assertTrue(repo._format.pack_compresses)
607
 
 
608
 
    def test_inventories_use_chk_map_with_parent_base_dict(self):
609
 
        tree = self.make_branch_and_memory_tree('repo', format="2a")
610
 
        tree.lock_write()
611
 
        tree.add([''], ['TREE_ROOT'])
612
 
        revid = tree.commit("foo")
613
 
        tree.unlock()
614
 
        tree.lock_read()
615
 
        self.addCleanup(tree.unlock)
616
 
        inv = tree.branch.repository.get_inventory(revid)
617
 
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
618
 
        inv.parent_id_basename_to_file_id._ensure_root()
619
 
        inv.id_to_entry._ensure_root()
620
 
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
621
 
        self.assertEqual(65536,
622
 
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
623
 
 
624
 
    def test_autopack_unchanged_chk_nodes(self):
625
 
        # at 20 unchanged commits, chk pages are packed that are split into
626
 
        # two groups such that the new pack being made doesn't have all its
627
 
        # pages in the source packs (though they are in the repository).
628
 
        # Use a memory backed repository, we don't need to hit disk for this
629
 
        tree = self.make_branch_and_memory_tree('tree', format='2a')
630
 
        tree.lock_write()
631
 
        self.addCleanup(tree.unlock)
632
 
        tree.add([''], ['TREE_ROOT'])
633
 
        for pos in range(20):
634
 
            tree.commit(str(pos))
635
 
 
636
 
    def test_pack_with_hint(self):
637
 
        tree = self.make_branch_and_memory_tree('tree', format='2a')
638
 
        tree.lock_write()
639
 
        self.addCleanup(tree.unlock)
640
 
        tree.add([''], ['TREE_ROOT'])
641
 
        # 1 commit to leave untouched
642
 
        tree.commit('1')
643
 
        to_keep = tree.branch.repository._pack_collection.names()
644
 
        # 2 to combine
645
 
        tree.commit('2')
646
 
        tree.commit('3')
647
 
        all = tree.branch.repository._pack_collection.names()
648
 
        combine = list(set(all) - set(to_keep))
649
 
        self.assertLength(3, all)
650
 
        self.assertLength(2, combine)
651
 
        tree.branch.repository.pack(hint=combine)
652
 
        final = tree.branch.repository._pack_collection.names()
653
 
        self.assertLength(2, final)
654
 
        self.assertFalse(combine[0] in final)
655
 
        self.assertFalse(combine[1] in final)
656
 
        self.assertSubset(to_keep, final)
657
 
 
658
 
    def test_stream_source_to_gc(self):
659
 
        source = self.make_repository('source', format='2a')
660
 
        target = self.make_repository('target', format='2a')
661
 
        stream = source._get_source(target._format)
662
 
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
663
 
 
664
 
    def test_stream_source_to_non_gc(self):
665
 
        source = self.make_repository('source', format='2a')
666
 
        target = self.make_repository('target', format='rich-root-pack')
667
 
        stream = source._get_source(target._format)
668
 
        # We don't want the child GroupCHKStreamSource
669
 
        self.assertIs(type(stream), repository.StreamSource)
670
 
 
671
 
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
672
 
        source_builder = self.make_branch_builder('source',
673
 
                            format='2a')
674
 
        # We have to build a fairly large tree, so that we are sure the chk
675
 
        # pages will have split into multiple pages.
676
 
        entries = [('add', ('', 'a-root-id', 'directory', None))]
677
 
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
678
 
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
679
 
                fname = i + j
680
 
                fid = fname + '-id'
681
 
                content = 'content for %s\n' % (fname,)
682
 
                entries.append(('add', (fname, fid, 'file', content)))
683
 
        source_builder.start_series()
684
 
        source_builder.build_snapshot('rev-1', None, entries)
685
 
        # Now change a few of them, so we get a few new pages for the second
686
 
        # revision
687
 
        source_builder.build_snapshot('rev-2', ['rev-1'], [
688
 
            ('modify', ('aa-id', 'new content for aa-id\n')),
689
 
            ('modify', ('cc-id', 'new content for cc-id\n')),
690
 
            ('modify', ('zz-id', 'new content for zz-id\n')),
691
 
            ])
692
 
        source_builder.finish_series()
693
 
        source_branch = source_builder.get_branch()
694
 
        source_branch.lock_read()
695
 
        self.addCleanup(source_branch.unlock)
696
 
        target = self.make_repository('target', format='2a')
697
 
        source = source_branch.repository._get_source(target._format)
698
 
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
699
 
 
700
 
        # On a regular pass, getting the inventories and chk pages for rev-2
701
 
        # would only get the newly created chk pages
702
 
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
703
 
                                    set(['rev-2']))
704
 
        simple_chk_records = []
705
 
        for vf_name, substream in source.get_stream(search):
706
 
            if vf_name == 'chk_bytes':
707
 
                for record in substream:
708
 
                    simple_chk_records.append(record.key)
709
 
            else:
710
 
                for _ in substream:
711
 
                    continue
712
 
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
713
 
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
714
 
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
715
 
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
716
 
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
717
 
                         simple_chk_records)
718
 
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
719
 
        # we should get a much larger set of pages.
720
 
        missing = [('inventories', 'rev-2')]
721
 
        full_chk_records = []
722
 
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
723
 
            if vf_name == 'inventories':
724
 
                for record in substream:
725
 
                    self.assertEqual(('rev-2',), record.key)
726
 
            elif vf_name == 'chk_bytes':
727
 
                for record in substream:
728
 
                    full_chk_records.append(record.key)
729
 
            else:
730
 
                self.fail('Should not be getting a stream of %s' % (vf_name,))
731
 
        # We have 257 records now. This is because we have 1 root page, and 256
732
 
        # leaf pages in a complete listing.
733
 
        self.assertEqual(257, len(full_chk_records))
734
 
        self.assertSubset(simple_chk_records, full_chk_records)
735
 
 
736
 
    def test_inconsistency_fatal(self):
737
 
        repo = self.make_repository('repo', format='2a')
738
 
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
739
 
        self.assertFalse(repo.texts._index._inconsistency_fatal)
740
 
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
741
 
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
742
 
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
743
 
 
744
 
 
745
 
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Which stream source is chosen between pack-based repositories.

    An exact format match should pick the optimised KnitPackStreamSource;
    any mismatch must fall back to the generic StreamSource.
    """

    def _stream_source(self, source_format, target_format):
        # Make a source and a target repository, and return the stream
        # source the source repository selects for the target's format.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def test_source_to_exact_pack_092(self):
        self.assertIsInstance(self._stream_source('pack-0.92', 'pack-0.92'),
                              knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        self.assertIsInstance(
            self._stream_source('rich-root-pack', 'rich-root-pack'),
            knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        self.assertIsInstance(self._stream_source('1.9', '1.9'),
                              knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        self.assertIsInstance(
            self._stream_source('1.9-rich-root', '1.9-rich-root'),
            knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        # Same format on both ends, but the target is reached over a smart
        # server connection.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source,
                              knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        self.assertIs(type(self._stream_source('pack-0.92', '1.9')),
                      repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        self.assertIs(type(self._stream_source('1.9', '1.9-rich-root')),
                      repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        # Mismatched formats over a smart server fall back to StreamSource.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        self.assertIs(type(self._stream_source('pack-0.92', 'dirstate')),
                      repository.StreamSource)
806
 
 
807
 
 
808
 
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check the parent ids found for rev_set, ignoring order."""
        found = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(found))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        # Parents already inside the requested set are not reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        # The root revision has no reportable parents.
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        # A requested revision absent from the repository contributes no
        # parents of its own.
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        # A ghost that is a *parent* of a requested revision is reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
864
 
 
865
 
 
866
 
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build a repository whose file graph is internally inconsistent.

        XXX: Borrowed from Aaron's "Reconcile can fix bad parent references"
        branch which is due to land in bzr.dev soon.  Once it does, this
        duplication should be removed.
        """
        repo = self.make_repository('broken-repo')
        teardowns = []
        try:
            repo.lock_write()
            teardowns.append(repo.unlock)
            repo.start_write_group()
            teardowns.append(repo.commit_write_group)
            # rev1a: a well-formed revision containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            rev = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', rev, inv)

            # rev1b: has an inventory (with file1) but no Revision object
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # rev2: file2 is sane; file1 claims 'rev1b' as an ancestor even
            # though rev2's only parent 'rev1a' never mentions it, making
            # rev1b an unreferenced ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # rev1c: a ghost - only a text for file2 is recorded
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # rev3: file2 refers to 'rev1c', which is a ghost in this
            # repository, so file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            for cleanup in reversed(teardowns):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Store inv plus a matching Revision object under revision_id."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        rev = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, rev, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Record a one-line text for filename and add its entry to inv."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines((file_id, revision), parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
959
 
 
960
 
 
961
 
class TestRepositoryPackCollection(TestCaseWithTransport):
962
 
 
963
 
    def get_format(self):
964
 
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
965
 
 
966
 
    def get_packs(self):
967
 
        format = self.get_format()
968
 
        repo = self.make_repository('.', format=format)
969
 
        return repo._pack_collection
970
 
 
971
 
    def make_packs_and_alt_repo(self, write_lock=False):
972
 
        """Create a pack repo with 3 packs, and access it via a second repo."""
973
 
        tree = self.make_branch_and_tree('.', format=self.get_format())
974
 
        tree.lock_write()
975
 
        self.addCleanup(tree.unlock)
976
 
        rev1 = tree.commit('one')
977
 
        rev2 = tree.commit('two')
978
 
        rev3 = tree.commit('three')
979
 
        r = repository.Repository.open('.')
980
 
        if write_lock:
981
 
            r.lock_write()
982
 
        else:
983
 
            r.lock_read()
984
 
        self.addCleanup(r.unlock)
985
 
        packs = r._pack_collection
986
 
        packs.ensure_loaded()
987
 
        return tree, r, packs, [rev1, rev2, rev3]
988
 
 
989
 
    def test__clear_obsolete_packs(self):
990
 
        packs = self.get_packs()
991
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
992
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
993
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
994
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
995
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
996
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
997
 
        res = packs._clear_obsolete_packs()
998
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
999
 
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1000
 
 
1001
 
    def test__clear_obsolete_packs_preserve(self):
1002
 
        packs = self.get_packs()
1003
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1004
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1005
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1006
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1007
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1008
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1009
 
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
1010
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1011
 
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1012
 
                         sorted(obsolete_pack_trans.list_dir('.')))
1013
 
 
1014
 
    def test__max_pack_count(self):
1015
 
        """The maximum pack count is a function of the number of revisions."""
1016
 
        # no revisions - one pack, so that we can have a revision free repo
1017
 
        # without it blowing up
1018
 
        packs = self.get_packs()
1019
 
        self.assertEqual(1, packs._max_pack_count(0))
1020
 
        # after that the sum of the digits, - check the first 1-9
1021
 
        self.assertEqual(1, packs._max_pack_count(1))
1022
 
        self.assertEqual(2, packs._max_pack_count(2))
1023
 
        self.assertEqual(3, packs._max_pack_count(3))
1024
 
        self.assertEqual(4, packs._max_pack_count(4))
1025
 
        self.assertEqual(5, packs._max_pack_count(5))
1026
 
        self.assertEqual(6, packs._max_pack_count(6))
1027
 
        self.assertEqual(7, packs._max_pack_count(7))
1028
 
        self.assertEqual(8, packs._max_pack_count(8))
1029
 
        self.assertEqual(9, packs._max_pack_count(9))
1030
 
        # check the boundary cases with two digits for the next decade
1031
 
        self.assertEqual(1, packs._max_pack_count(10))
1032
 
        self.assertEqual(2, packs._max_pack_count(11))
1033
 
        self.assertEqual(10, packs._max_pack_count(19))
1034
 
        self.assertEqual(2, packs._max_pack_count(20))
1035
 
        self.assertEqual(3, packs._max_pack_count(21))
1036
 
        # check some arbitrary big numbers
1037
 
        self.assertEqual(25, packs._max_pack_count(112894))
1038
 
 
1039
 
    def test_repr(self):
1040
 
        packs = self.get_packs()
1041
 
        self.assertContainsRe(repr(packs),
1042
 
            'RepositoryPackCollection(.*Repository(.*))')
1043
 
 
1044
 
    def test__obsolete_packs(self):
1045
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1046
 
        names = packs.names()
1047
 
        pack = packs.get_pack_by_name(names[0])
1048
 
        # Schedule this one for removal
1049
 
        packs._remove_pack_from_memory(pack)
1050
 
        # Simulate a concurrent update by renaming the .pack file and one of
1051
 
        # the indices
1052
 
        packs.transport.rename('packs/%s.pack' % (names[0],),
1053
 
                               'obsolete_packs/%s.pack' % (names[0],))
1054
 
        packs.transport.rename('indices/%s.iix' % (names[0],),
1055
 
                               'obsolete_packs/%s.iix' % (names[0],))
1056
 
        # Now trigger the obsoletion, and ensure that all the remaining files
1057
 
        # are still renamed
1058
 
        packs._obsolete_packs([pack])
1059
 
        self.assertEqual([n + '.pack' for n in names[1:]],
1060
 
                         sorted(packs._pack_transport.list_dir('.')))
1061
 
        # names[0] should not be present in the index anymore
1062
 
        self.assertEqual(names[1:],
1063
 
            sorted(set([osutils.splitext(n)[0] for n in
1064
 
                        packs._index_transport.list_dir('.')])))
1065
 
 
1066
 
    def test_pack_distribution_zero(self):
1067
 
        packs = self.get_packs()
1068
 
        self.assertEqual([0], packs.pack_distribution(0))
1069
 
 
1070
 
    def test_ensure_loaded_unlocked(self):
1071
 
        packs = self.get_packs()
1072
 
        self.assertRaises(errors.ObjectNotLocked,
1073
 
                          packs.ensure_loaded)
1074
 
 
1075
 
    def test_pack_distribution_one_to_nine(self):
1076
 
        packs = self.get_packs()
1077
 
        self.assertEqual([1],
1078
 
            packs.pack_distribution(1))
1079
 
        self.assertEqual([1, 1],
1080
 
            packs.pack_distribution(2))
1081
 
        self.assertEqual([1, 1, 1],
1082
 
            packs.pack_distribution(3))
1083
 
        self.assertEqual([1, 1, 1, 1],
1084
 
            packs.pack_distribution(4))
1085
 
        self.assertEqual([1, 1, 1, 1, 1],
1086
 
            packs.pack_distribution(5))
1087
 
        self.assertEqual([1, 1, 1, 1, 1, 1],
1088
 
            packs.pack_distribution(6))
1089
 
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
1090
 
            packs.pack_distribution(7))
1091
 
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
1092
 
            packs.pack_distribution(8))
1093
 
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
1094
 
            packs.pack_distribution(9))
1095
 
 
1096
 
    def test_pack_distribution_stable_at_boundaries(self):
1097
 
        """When there are multi-rev packs the counts are stable."""
1098
 
        packs = self.get_packs()
1099
 
        # in 10s:
1100
 
        self.assertEqual([10], packs.pack_distribution(10))
1101
 
        self.assertEqual([10, 1], packs.pack_distribution(11))
1102
 
        self.assertEqual([10, 10], packs.pack_distribution(20))
1103
 
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
1104
 
        # 100s
1105
 
        self.assertEqual([100], packs.pack_distribution(100))
1106
 
        self.assertEqual([100, 1], packs.pack_distribution(101))
1107
 
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
1108
 
        self.assertEqual([100, 100], packs.pack_distribution(200))
1109
 
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
1110
 
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
1111
 
 
1112
 
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
1113
 
        packs = self.get_packs()
1114
 
        existing_packs = [(2000, "big"), (9, "medium")]
1115
 
        # rev count - 2009 -> 2x1000 + 9x1
1116
 
        pack_operations = packs.plan_autopack_combinations(
1117
 
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
1118
 
        self.assertEqual([], pack_operations)
1119
 
 
1120
 
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
1121
 
        packs = self.get_packs()
1122
 
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
1123
 
        # rev count - 2010 -> 2x1000 + 1x10
1124
 
        pack_operations = packs.plan_autopack_combinations(
1125
 
            existing_packs, [1000, 1000, 10])
1126
 
        self.assertEqual([], pack_operations)
1127
 
 
1128
 
    def test_plan_pack_operations_2010_combines_smallest_two(self):
1129
 
        packs = self.get_packs()
1130
 
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
1131
 
            (1, "single1")]
1132
 
        # rev count - 2010 -> 2x1000 + 1x10 (3)
1133
 
        pack_operations = packs.plan_autopack_combinations(
1134
 
            existing_packs, [1000, 1000, 10])
1135
 
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
1136
 
 
1137
 
    def test_plan_pack_operations_creates_a_single_op(self):
1138
 
        packs = self.get_packs()
1139
 
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
1140
 
                          (10, 'e'), (6, 'f'), (4, 'g')]
1141
 
        # rev count 150 -> 1x100 and 5x10
1142
 
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
1143
 
        # be combined into a single 120 size pack, and the 6 & 4 would
1144
 
        # becombined into a size 10 pack. However, if we have to rewrite them,
1145
 
        # we save a pack file with no increased I/O by putting them into the
1146
 
        # same file.
1147
 
        distribution = packs.pack_distribution(150)
1148
 
        pack_operations = packs.plan_autopack_combinations(existing_packs,
1149
 
                                                           distribution)
1150
 
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
1151
 
 
1152
 
    def test_all_packs_none(self):
1153
 
        format = self.get_format()
1154
 
        tree = self.make_branch_and_tree('.', format=format)
1155
 
        tree.lock_read()
1156
 
        self.addCleanup(tree.unlock)
1157
 
        packs = tree.branch.repository._pack_collection
1158
 
        packs.ensure_loaded()
1159
 
        self.assertEqual([], packs.all_packs())
1160
 
 
1161
 
    def test_all_packs_one(self):
1162
 
        format = self.get_format()
1163
 
        tree = self.make_branch_and_tree('.', format=format)
1164
 
        tree.commit('start')
1165
 
        tree.lock_read()
1166
 
        self.addCleanup(tree.unlock)
1167
 
        packs = tree.branch.repository._pack_collection
1168
 
        packs.ensure_loaded()
1169
 
        self.assertEqual([
1170
 
            packs.get_pack_by_name(packs.names()[0])],
1171
 
            packs.all_packs())
1172
 
 
1173
 
    def test_all_packs_two(self):
1174
 
        format = self.get_format()
1175
 
        tree = self.make_branch_and_tree('.', format=format)
1176
 
        tree.commit('start')
1177
 
        tree.commit('continue')
1178
 
        tree.lock_read()
1179
 
        self.addCleanup(tree.unlock)
1180
 
        packs = tree.branch.repository._pack_collection
1181
 
        packs.ensure_loaded()
1182
 
        self.assertEqual([
1183
 
            packs.get_pack_by_name(packs.names()[0]),
1184
 
            packs.get_pack_by_name(packs.names()[1]),
1185
 
            ], packs.all_packs())
1186
 
 
1187
 
    def test_get_pack_by_name(self):
1188
 
        format = self.get_format()
1189
 
        tree = self.make_branch_and_tree('.', format=format)
1190
 
        tree.commit('start')
1191
 
        tree.lock_read()
1192
 
        self.addCleanup(tree.unlock)
1193
 
        packs = tree.branch.repository._pack_collection
1194
 
        packs.reset()
1195
 
        packs.ensure_loaded()
1196
 
        name = packs.names()[0]
1197
 
        pack_1 = packs.get_pack_by_name(name)
1198
 
        # the pack should be correctly initialised
1199
 
        sizes = packs._names[name]
1200
 
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
1201
 
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
1202
 
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
1203
 
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
1204
 
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
1205
 
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
1206
 
        # and the same instance should be returned on successive calls.
1207
 
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
1208
 
 
1209
 
    def test_reload_pack_names_new_entry(self):
1210
 
        tree, r, packs, revs = self.make_packs_and_alt_repo()
1211
 
        names = packs.names()
1212
 
        # Add a new pack file into the repository
1213
 
        rev4 = tree.commit('four')
1214
 
        new_names = tree.branch.repository._pack_collection.names()
1215
 
        new_name = set(new_names).difference(names)
1216
 
        self.assertEqual(1, len(new_name))
1217
 
        new_name = new_name.pop()
1218
 
        # The old collection hasn't noticed yet
1219
 
        self.assertEqual(names, packs.names())
1220
 
        self.assertTrue(packs.reload_pack_names())
1221
 
        self.assertEqual(new_names, packs.names())
1222
 
        # And the repository can access the new revision
1223
 
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
1224
 
        self.assertFalse(packs.reload_pack_names())
1225
 
 
1226
 
    def test_reload_pack_names_added_and_removed(self):
1227
 
        tree, r, packs, revs = self.make_packs_and_alt_repo()
1228
 
        names = packs.names()
1229
 
        # Now repack the whole thing
1230
 
        tree.branch.repository.pack()
1231
 
        new_names = tree.branch.repository._pack_collection.names()
1232
 
        # The other collection hasn't noticed yet
1233
 
        self.assertEqual(names, packs.names())
1234
 
        self.assertTrue(packs.reload_pack_names())
1235
 
        self.assertEqual(new_names, packs.names())
1236
 
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1237
 
        self.assertFalse(packs.reload_pack_names())
1238
 
 
1239
 
    def test_reload_pack_names_preserves_pending(self):
        """Reloading pack names must not lose in-memory pending changes.

        A pack added via a write group and a pack removed via
        _remove_pack_from_memory are 'pending' until the names file is
        saved; reload_pack_names must keep both pending changes intact.
        """
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        # Allocate the finished pack into the collection without saving the
        # names file, leaving it as a pending addition.
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # Before the reload: the diff against the on-disk names shows exactly
        # one added pack and one deleted pack.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # After the reload: the same pending add and delete must still be
        # reported by _diff_pack_names.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
1277
 
 
1278
 
    def test_autopack_obsoletes_new_pack(self):
1279
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1280
 
        packs._max_pack_count = lambda x: 1
1281
 
        packs.pack_distribution = lambda x: [10]
1282
 
        r.start_write_group()
1283
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1284
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1285
 
        # This should trigger an autopack, which will combine everything into a
1286
 
        # single pack file.
1287
 
        new_names = r.commit_write_group()
1288
 
        names = packs.names()
1289
 
        self.assertEqual(1, len(names))
1290
 
        self.assertEqual([names[0] + '.pack'],
1291
 
                         packs._pack_transport.list_dir('.'))
1292
 
 
1293
 
    def test_autopack_reloads_and_stops(self):
1294
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1295
 
        # After we have determined what needs to be autopacked, trigger a
1296
 
        # full-pack via the other repo which will cause us to re-evaluate and
1297
 
        # decide we don't need to do anything
1298
 
        orig_execute = packs._execute_pack_operations
1299
 
        def _munged_execute_pack_ops(*args, **kwargs):
1300
 
            tree.branch.repository.pack()
1301
 
            return orig_execute(*args, **kwargs)
1302
 
        packs._execute_pack_operations = _munged_execute_pack_ops
1303
 
        packs._max_pack_count = lambda x: 1
1304
 
        packs.pack_distribution = lambda x: [10]
1305
 
        self.assertFalse(packs.autopack())
1306
 
        self.assertEqual(1, len(packs.names()))
1307
 
        self.assertEqual(tree.branch.repository._pack_collection.names(),
1308
 
                         packs.names())
1309
 
 
1310
 
    def test__save_pack_names(self):
1311
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1312
 
        names = packs.names()
1313
 
        pack = packs.get_pack_by_name(names[0])
1314
 
        packs._remove_pack_from_memory(pack)
1315
 
        packs._save_pack_names(obsolete_packs=[pack])
1316
 
        cur_packs = packs._pack_transport.list_dir('.')
1317
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1318
 
        # obsolete_packs will also have stuff like .rix and .iix present.
1319
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1320
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1321
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1322
 
 
1323
 
    def test__save_pack_names_already_obsoleted(self):
1324
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1325
 
        names = packs.names()
1326
 
        pack = packs.get_pack_by_name(names[0])
1327
 
        packs._remove_pack_from_memory(pack)
1328
 
        # We are going to simulate a concurrent autopack by manually obsoleting
1329
 
        # the pack directly.
1330
 
        packs._obsolete_packs([pack])
1331
 
        packs._save_pack_names(clear_obsolete_packs=True,
1332
 
                               obsolete_packs=[pack])
1333
 
        cur_packs = packs._pack_transport.list_dir('.')
1334
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1335
 
        # Note that while we set clear_obsolete_packs=True, it should not
1336
 
        # delete a pack file that we have also scheduled for obsoletion.
1337
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1338
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1339
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1340
 
 
1341
 
 
1342
 
 
1343
 
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        # Exercise the full comparison protocol: == and != in both
        # directions must agree.
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        # Mirror of assertCurrentlyEqual for unequal pairs.
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # Perturb each attribute in turn: equality must break while only one
        # side differs, and be restored once both sides agree again.
        for attribute in ['revision_index', 'inventory_index', 'text_index',
                          'signature_index', 'name', 'transport']:
            setattr(left, attribute, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attribute, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
1391
 
 
1392
 
 
1393
 
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A freshly created NewPack is wired to the collection's transports
        and starts with btree builders, a hash, and a random name."""
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        new_pack = pack_repo.NewPack(collection)
        # Abort on cleanup so the write stream always gets closed.
        self.addCleanup(new_pack.abort)
        self.assertIsInstance(new_pack.revision_index, BTreeBuilder)
        self.assertIsInstance(new_pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(new_pack._hash, type(osutils.md5()))
        self.assertTrue(new_pack.upload_transport is upload_transport)
        self.assertTrue(new_pack.index_transport is index_transport)
        self.assertTrue(new_pack.pack_transport is pack_transport)
        self.assertEqual(None, new_pack.index_sizes)
        self.assertEqual(20, len(new_pack.random_name))
        self.assertIsInstance(new_pack.random_name, str)
        self.assertIsInstance(new_pack.start_time, float)
1422
 
 
1423
 
 
1424
 
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing a subset of revisions floats their packs to the front."""
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        branch = builder.get_branch()
        branch.lock_read()
        builder.finish_series()
        self.addCleanup(branch.unlock)
        # Four commits give four pack files; by construction they correspond
        # to ['D', 'C', 'B', 'A'].
        packs = branch.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(branch.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Copying only B and C should move their packs to the front of the
        # .packs attribute, leaving the others in their original order.
        reordered = [packs[1], packs[2], packs[0], packs[3]]
        packer.pack()
        self.assertEqual(reordered, packer.packs)
1457
 
 
1458
 
 
1459
 
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        # Helper: a pack collection from a fresh default-format repository.
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                            [], '.test')
        pack = packer.open_pack()
        self.addCleanup(pack.abort)  # ensure cleanup even on failure
        self.assertIsInstance(pack, pack_repo.NewPack)
        # Every index builder must have been put into size-optimised mode.
        for index in [pack.revision_index, pack.inventory_index,
                      pack.text_index, pack.signature_index]:
            self.assertTrue(index._optimize_for_size)
1476
 
 
1477
 
 
1478
 
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for the groupcompress (GC/CHK) packer, in particular its
    behaviour when a revision's Revision and Inventory texts end up in
    different pack files."""

    def make_abc_branch(self):
        """Return a branch with three linear revisions: A -> B -> C."""
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        # 'base' stops at A; 'stacked' stacks on base, so fetching B into it
        # copies A's inventory (as B's parent) but not A's Revision text.
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        # Fetching A fills in its Revision text, landing in a new pack file.
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        # Packing everything should notice the missing inventory and raise.
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
1576
 
 
1577
 
 
1578
 
class TestCrossFormatPacks(TestCaseWithTransport):
1579
 
 
1580
 
    def log_pack(self, hint=None):
        """Wrapper around the target repository's pack() that records calls.

        Installed in place of Repository.pack by run_stream/run_fetch so the
        tests can assert whether (and with what hint) pack was invoked.
        """
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        # When a pack is expected, it must have been given a non-empty hint.
        if self.expect_hint:
            self.assertTrue(hint)
1585
 
 
1586
 
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
1587
 
        self.expect_hint = expect_pack_called
1588
 
        self.calls = []
1589
 
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1590
 
        source_tree.lock_write()
1591
 
        self.addCleanup(source_tree.unlock)
1592
 
        tip = source_tree.commit('foo')
1593
 
        target = self.make_repository('target', format=target_fmt)
1594
 
        target.lock_write()
1595
 
        self.addCleanup(target.unlock)
1596
 
        source = source_tree.branch.repository._get_source(target._format)
1597
 
        self.orig_pack = target.pack
1598
 
        target.pack = self.log_pack
1599
 
        search = target.search_missing_revision_ids(
1600
 
            source_tree.branch.repository, revision_ids=[tip])
1601
 
        stream = source.get_stream(search)
1602
 
        from_format = source_tree.branch.repository._format
1603
 
        sink = target._get_sink()
1604
 
        sink.insert_stream(stream, from_format, [])
1605
 
        if expect_pack_called:
1606
 
            self.assertLength(1, self.calls)
1607
 
        else:
1608
 
            self.assertLength(0, self.calls)
1609
 
 
1610
 
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
1611
 
        self.expect_hint = expect_pack_called
1612
 
        self.calls = []
1613
 
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1614
 
        source_tree.lock_write()
1615
 
        self.addCleanup(source_tree.unlock)
1616
 
        tip = source_tree.commit('foo')
1617
 
        target = self.make_repository('target', format=target_fmt)
1618
 
        target.lock_write()
1619
 
        self.addCleanup(target.unlock)
1620
 
        source = source_tree.branch.repository
1621
 
        self.orig_pack = target.pack
1622
 
        target.pack = self.log_pack
1623
 
        target.fetch(source)
1624
 
        if expect_pack_called:
1625
 
            self.assertLength(1, self.calls)
1626
 
        else:
1627
 
            self.assertLength(0, self.calls)
1628
 
 
1629
 
    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called by the stream sink.
        self.run_stream('1.9', 'rich-root-pack', False)
1633
 
 
1634
 
    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called by the stream sink.
        self.run_stream('1.9', '2a', True)
1638
 
 
1639
 
    def test_sink_format_same_no(self):
        # When the source and target formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)
1642
 
 
1643
 
    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called by the inter-repo fetch path.
        self.run_fetch('1.9', 'rich-root-pack', False)
1647
 
 
1648
 
    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called by the inter-repo fetch path.
        self.run_fetch('1.9', '2a', True)
1652
 
 
1653
 
    def test_IDS_format_same_no(self):
        # When the source and target formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)