/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2018-07-08 14:45:27 UTC
  • mto: This revision was merged to the branch mainline in revision 7036.
  • Revision ID: jelmer@jelmer.uk-20180708144527-codhlvdcdg9y0nji
Fix a bunch of merge tests.
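
The changes below port this test module from the old bzrlib namespace to breezy: imports move from bzrlib and bzrlib.repofmt to breezy and breezy.bzr, format lookups go through controldir.format_registry rather than bzrdir.format_registry, and revision ids, file ids and format strings become byte strings. As a rough illustration of the same idioms outside this diff, here is a minimal sketch; the class name and revision id are invented for the example and are not taken from this branch:

    from breezy import controldir
    from breezy.tests import TestCaseWithTransport


    class TestKnitPortExample(TestCaseWithTransport):
        """Illustrative only -- not part of this branch's test suite."""

        def test_commit_with_bytes_revid(self):
            # Format lookup goes through controldir.format_registry
            # (bzrlib used bzrdir.format_registry for the same purpose).
            fmt = controldir.format_registry.get('knit')()
            tree = self.make_branch_and_tree('.', format=fmt)
            # Revision ids are byte strings in breezy.
            tree.commit("Dull commit", rev_id=b"dull")
            rev_tree = tree.branch.repository.revision_tree(b"dull")
            self.assertEqual(b"dull", rev_tree.get_revision_id())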

@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2012, 2016 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -23,60 +23,63 @@
 """
 
 from stat import S_ISDIR
-import sys
 
-import bzrlib
-from bzrlib.errors import (NoSuchFile,
-                           UnknownFormatError,
-                           UnsupportedFormatError,
-                           )
-from bzrlib import (
-    graph,
+import breezy
+from breezy.errors import (
+    UnknownFormatError,
+    UnsupportedFormatError,
+    )
+from breezy import (
     tests,
-    )
-from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex
-from bzrlib.repository import RepositoryFormat
-from bzrlib.tests import (
+    transport,
+    )
+from breezy.bzr import (
+    bzrdir,
+    btree_index,
+    inventory,
+    repository as bzrrepository,
+    versionedfile,
+    vf_repository,
+    vf_search,
+    )
+from breezy.bzr.btree_index import BTreeBuilder, BTreeGraphIndex
+from breezy.bzr.index import GraphIndex
+from breezy.repository import RepositoryFormat
+from breezy.tests import (
     TestCase,
     TestCaseWithTransport,
     )
-from bzrlib.transport import (
-    get_transport,
-    )
-from bzrlib import (
-    bzrdir,
+from breezy import (
+    controldir,
     errors,
-    inventory,
     osutils,
     repository,
     revision as _mod_revision,
     upgrade,
-    versionedfile,
     workingtree,
     )
-from bzrlib.repofmt import (
+from breezy.bzr import (
     groupcompress_repo,
     knitrepo,
+    knitpack_repo,
     pack_repo,
-    weaverepo,
     )
 
 
 class TestDefaultFormat(TestCase):
 
     def test_get_set_default_format(self):
-        old_default = bzrdir.format_registry.get('default')
+        old_default = controldir.format_registry.get('default')
         private_default = old_default().repository_format.__class__
-        old_format = repository.RepositoryFormat.get_default_format()
+        old_format = repository.format_registry.get_default()
         self.assertTrue(isinstance(old_format, private_default))
         def make_sample_bzrdir():
             my_bzrdir = bzrdir.BzrDirMetaFormat1()
             my_bzrdir.repository_format = SampleRepositoryFormat()
             return my_bzrdir
-        bzrdir.format_registry.remove('default')
-        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
-        bzrdir.format_registry.set_default('sample')
+        controldir.format_registry.remove('default')
+        controldir.format_registry.register('sample', make_sample_bzrdir, '')
+        controldir.format_registry.set_default('sample')
         # creating a repository should now create an instrumented dir.
         try:
             # the default branch format is used by the meta dir format
@@ -85,37 +88,47 @@
             result = dir.create_repository()
             self.assertEqual(result, 'A bzr repository dir')
         finally:
-            bzrdir.format_registry.remove('default')
-            bzrdir.format_registry.remove('sample')
-            bzrdir.format_registry.register('default', old_default, '')
-        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
+            controldir.format_registry.remove('default')
+            controldir.format_registry.remove('sample')
+            controldir.format_registry.register('default', old_default, '')
+        self.assertIsInstance(repository.format_registry.get_default(),
                               old_format.__class__)
 
 
-class SampleRepositoryFormat(repository.RepositoryFormat):
+class SampleRepositoryFormat(bzrrepository.RepositoryFormatMetaDir):
     """A sample format
 
     this format is initializable, unsupported to aid in testing the
     open and open(unsupported=True) routines.
     """
 
-    def get_format_string(self):
+    @classmethod
+    def get_format_string(cls):
         """See RepositoryFormat.get_format_string()."""
-        return "Sample .bzr repository format."
+        return b"Sample .bzr repository format."
 
-    def initialize(self, a_bzrdir, shared=False):
+    def initialize(self, a_controldir, shared=False):
         """Initialize a repository in a BzrDir"""
-        t = a_bzrdir.get_repository_transport(self)
+        t = a_controldir.get_repository_transport(self)
         t.put_bytes('format', self.get_format_string())
         return 'A bzr repository dir'
 
     def is_supported(self):
         return False
 
-    def open(self, a_bzrdir, _found=False):
+    def open(self, a_controldir, _found=False):
         return "opened repository."
 
 
+class SampleExtraRepositoryFormat(repository.RepositoryFormat):
+    """A sample format that can not be used in a metadir
+
+    """
+
+    def get_format_string(self):
+        raise NotImplementedError
+
+
 class TestRepositoryFormat(TestCaseWithTransport):
     """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
 
@@ -125,228 +138,81 @@
         # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
         def check_format(format, url):
-            dir = format._matchingbzrdir.initialize(url)
+            dir = format._matchingcontroldir.initialize(url)
             format.initialize(dir)
-            t = get_transport(url)
-            found_format = repository.RepositoryFormat.find_format(dir)
-            self.failUnless(isinstance(found_format, format.__class__))
-        check_format(weaverepo.RepositoryFormat7(), "bar")
+            t = transport.get_transport_from_path(url)
+            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
+            self.assertIsInstance(found_format, format.__class__)
+        check_format(repository.format_registry.get_default(), "bar")
 
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         self.assertRaises(errors.NoRepositoryPresent,
-                          repository.RepositoryFormat.find_format,
+                          bzrrepository.RepositoryFormatMetaDir.find_format,
                           dir)
 
+    def test_from_string(self):
+        self.assertIsInstance(
+            SampleRepositoryFormat.from_string(
+                b"Sample .bzr repository format."),
+            SampleRepositoryFormat)
+        self.assertRaises(AssertionError,
+            SampleRepositoryFormat.from_string,
+                b"Different .bzr repository format.")
+
     def test_find_format_unknown_format(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         SampleRepositoryFormat().initialize(dir)
         self.assertRaises(UnknownFormatError,
-                          repository.RepositoryFormat.find_format,
+                          bzrrepository.RepositoryFormatMetaDir.find_format,
                           dir)
 
+    def test_find_format_with_features(self):
+        tree = self.make_branch_and_tree('.', format='2a')
+        tree.branch.repository.update_feature_flags({b"name": b"necessity"})
+        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(tree.controldir)
+        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
+        self.assertEqual(found_format.features.get(b"name"), b"necessity")
+        self.assertRaises(bzrdir.MissingFeature, found_format.check_support_status,
+            True)
+        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
+            b"name")
+        bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
+        found_format.check_support_status(True)
+
+
+class TestRepositoryFormatRegistry(TestCase):
+
+    def setUp(self):
+        super(TestRepositoryFormatRegistry, self).setUp()
+        self.registry = repository.RepositoryFormatRegistry()
+
     def test_register_unregister_format(self):
         format = SampleRepositoryFormat()
-        # make a control dir
-        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        # make a repo
-        format.initialize(dir)
-        # register a format for it.
-        repository.RepositoryFormat.register_format(format)
-        # which repository.Open will refuse (not supported)
-        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
-        # but open(unsupported) will work
-        self.assertEqual(format.open(dir), "opened repository.")
-        # unregister the format
-        repository.RepositoryFormat.unregister_format(format)
-
-
-class TestFormat6(TestCaseWithTransport):
-
-    def test_attribute__fetch_order(self):
-        """Weaves need topological data insertion."""
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual('topological', repo._format._fetch_order)
-
-    def test_attribute__fetch_uses_deltas(self):
-        """Weaves do not reuse deltas."""
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(False, repo._format._fetch_uses_deltas)
-
-    def test_attribute__fetch_reconcile(self):
-        """Weave repositories need a reconcile after fetch."""
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(True, repo._format._fetch_reconcile)
-
-    def test_no_ancestry_weave(self):
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        # We no longer need to create the ancestry.weave file
-        # since it is *never* used.
-        self.assertRaises(NoSuchFile,
-                          control.transport.get,
-                          'ancestry.weave')
-
-    def test_supports_external_lookups(self):
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertFalse(repo._format.supports_external_lookups)
-
-
-class TestFormat7(TestCaseWithTransport):
-
-    def test_attribute__fetch_order(self):
-        """Weaves need topological data insertion."""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual('topological', repo._format._fetch_order)
-
-    def test_attribute__fetch_uses_deltas(self):
-        """Weaves do not reuse deltas."""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(False, repo._format._fetch_uses_deltas)
-
-    def test_attribute__fetch_reconcile(self):
-        """Weave repositories need a reconcile after fetch."""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(True, repo._format._fetch_reconcile)
-
-    def test_disk_layout(self):
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        # in case of side effects of locking.
-        repo.lock_write()
-        repo.unlock()
-        # we want:
-        # format 'Bazaar-NG Repository format 7'
-        # lock ''
-        # inventory.weave == empty_weave
-        # empty revision-store directory
-        # empty weaves directory
-        t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Repository format 7',
-                             t.get('format').read())
-        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
-        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
-        self.assertEqualDiff('# bzr weave file v5\n'
-                             'w\n'
-                             'W\n',
-                             t.get('inventory.weave').read())
-        # Creating a file with id Foo:Bar results in a non-escaped file name on
-        # disk.
-        control.create_branch()
-        tree = control.create_workingtree()
-        tree.add(['foo'], ['Foo:Bar'], ['file'])
-        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        try:
-            tree.commit('first post', rev_id='first')
-        except errors.IllegalPath:
-            if sys.platform != 'win32':
-                raise
-            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
-                              ' in repo format 7')
-            return
-        self.assertEqualDiff(
-            '# bzr weave file v5\n'
-            'i\n'
-            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
-            'n first\n'
-            '\n'
-            'w\n'
-            '{ 0\n'
-            '. content\n'
-            '}\n'
-            'W\n',
-            t.get('weaves/74/Foo%3ABar.weave').read())
-
-    def test_shared_disk_layout(self):
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
-        # we want:
-        # format 'Bazaar-NG Repository format 7'
-        # inventory.weave == empty_weave
-        # empty revision-store directory
-        # empty weaves directory
-        # a 'shared-storage' marker file.
-        # lock is not present when unlocked
-        t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Repository format 7',
-                             t.get('format').read())
-        self.assertEqualDiff('', t.get('shared-storage').read())
-        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
-        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
-        self.assertEqualDiff('# bzr weave file v5\n'
-                             'w\n'
-                             'W\n',
-                             t.get('inventory.weave').read())
-        self.assertFalse(t.has('branch-lock'))
-
-    def test_creates_lockdir(self):
-        """Make sure it appears to be controlled by a LockDir existence"""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
-        t = control.get_repository_transport(None)
-        # TODO: Should check there is a 'lock' toplevel directory,
-        # regardless of contents
-        self.assertFalse(t.has('lock/held/info'))
-        repo.lock_write()
-        try:
-            self.assertTrue(t.has('lock/held/info'))
-        finally:
-            # unlock so we don't get a warning about failing to do so
-            repo.unlock()
-
-    def test_uses_lockdir(self):
-        """repo format 7 actually locks on lockdir"""
-        base_url = self.get_url()
-        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
-        t = control.get_repository_transport(None)
-        repo.lock_write()
-        repo.unlock()
-        del repo
-        # make sure the same lock is created by opening it
-        repo = repository.Repository.open(base_url)
-        repo.lock_write()
-        self.assertTrue(t.has('lock/held/info'))
-        repo.unlock()
-        self.assertFalse(t.has('lock/held/info'))
-
-    def test_shared_no_tree_disk_layout(self):
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
-        repo.set_make_working_trees(False)
-        # we want:
-        # format 'Bazaar-NG Repository format 7'
-        # lock ''
-        # inventory.weave == empty_weave
-        # empty revision-store directory
-        # empty weaves directory
-        # a 'shared-storage' marker file.
-        t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Repository format 7',
-                             t.get('format').read())
-        ## self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff('', t.get('shared-storage').read())
-        self.assertEqualDiff('', t.get('no-working-trees').read())
-        repo.set_make_working_trees(True)
-        self.assertFalse(t.has('no-working-trees'))
-        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
-        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
-        self.assertEqualDiff('# bzr weave file v5\n'
-                             'w\n'
-                             'W\n',
-                             t.get('inventory.weave').read())
-
-    def test_supports_external_lookups(self):
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertFalse(repo._format.supports_external_lookups)
+        self.registry.register(format)
+        self.assertEqual(format, self.registry.get(b"Sample .bzr repository format."))
+        self.registry.remove(format)
+        self.assertRaises(KeyError, self.registry.get, b"Sample .bzr repository format.")
+
+    def test_get_all(self):
+        format = SampleRepositoryFormat()
+        self.assertEqual([], self.registry._get_all())
+        self.registry.register(format)
+        self.assertEqual([format], self.registry._get_all())
+
+    def test_register_extra(self):
+        format = SampleExtraRepositoryFormat()
+        self.assertEqual([], self.registry._get_all())
+        self.registry.register_extra(format)
+        self.assertEqual([format], self.registry._get_all())
+
+    def test_register_extra_lazy(self):
+        self.assertEqual([], self.registry._get_all())
+        self.registry.register_extra_lazy("breezy.tests.test_repository",
+            "SampleExtraRepositoryFormat")
+        formats = self.registry._get_all()
+        self.assertEqual(1, len(formats))
+        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
 
 
 class TestFormatKnit1(TestCaseWithTransport):
354
220
    def test_attribute__fetch_order(self):
355
221
        """Knits need topological data insertion."""
356
222
        repo = self.make_repository('.',
357
 
                format=bzrdir.format_registry.get('knit')())
 
223
                format=controldir.format_registry.get('knit')())
358
224
        self.assertEqual('topological', repo._format._fetch_order)
359
225
 
360
226
    def test_attribute__fetch_uses_deltas(self):
361
227
        """Knits reuse deltas."""
362
228
        repo = self.make_repository('.',
363
 
                format=bzrdir.format_registry.get('knit')())
 
229
                format=controldir.format_registry.get('knit')())
364
230
        self.assertEqual(True, repo._format._fetch_uses_deltas)
365
231
 
366
232
    def test_disk_layout(self):
376
242
        # empty revision-store directory
377
243
        # empty weaves directory
378
244
        t = control.get_repository_transport(None)
379
 
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
380
 
                             t.get('format').read())
 
245
        with t.get('format') as f:
 
246
            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
 
247
                                 f.read())
381
248
        # XXX: no locks left when unlocked at the moment
382
249
        # self.assertEqualDiff('', t.get('lock').read())
383
250
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
385
252
        # Check per-file knits.
386
253
        branch = control.create_branch()
387
254
        tree = control.create_workingtree()
388
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
389
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
390
 
        tree.commit('1st post', rev_id='foo')
 
255
        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
 
256
        tree.put_file_bytes_non_atomic('foo', b'')
 
257
        tree.commit('1st post', rev_id=b'foo')
391
258
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
392
259
            '\nfoo fulltext 0 81  :')
393
260
 
452
319
        is valid when the api is not being abused.
453
320
        """
454
321
        repo = self.make_repository('.',
455
 
                format=bzrdir.format_registry.get('knit')())
456
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
457
 
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
458
 
        self.assertEqual('test-rev-id', inv.root.revision)
 
322
                format=controldir.format_registry.get('knit')())
 
323
        inv_xml = b'<inventory format="5">\n</inventory>\n'
 
324
        inv = repo._deserialise_inventory(b'test-rev-id', inv_xml)
 
325
        self.assertEqual(b'test-rev-id', inv.root.revision)
459
326
 
460
327
    def test_deserialise_uses_global_revision_id(self):
461
328
        """If it is set, then we re-use the global revision id"""
462
329
        repo = self.make_repository('.',
463
 
                format=bzrdir.format_registry.get('knit')())
464
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
465
 
                   '</inventory>\n')
 
330
                format=controldir.format_registry.get('knit')())
 
331
        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
 
332
                   b'</inventory>\n')
466
333
        # Arguably, the deserialise_inventory should detect a mismatch, and
467
334
        # raise an error, rather than silently using one revision_id over the
468
335
        # other.
469
336
        self.assertRaises(AssertionError, repo._deserialise_inventory,
470
 
            'test-rev-id', inv_xml)
471
 
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
472
 
        self.assertEqual('other-rev-id', inv.root.revision)
 
337
            b'test-rev-id', inv_xml)
 
338
        inv = repo._deserialise_inventory(b'other-rev-id', inv_xml)
 
339
        self.assertEqual(b'other-rev-id', inv.root.revision)
473
340
 
474
341
    def test_supports_external_lookups(self):
475
342
        repo = self.make_repository('.',
476
 
                format=bzrdir.format_registry.get('knit')())
 
343
                format=controldir.format_registry.get('knit')())
477
344
        self.assertFalse(repo._format.supports_external_lookups)
478
345
 
479
346
 
520
387
        # classes do not barf inappropriately when a surprising repository type
521
388
        # is handed to them.
522
389
        dummy_a = DummyRepository()
 
390
        dummy_a._format = RepositoryFormat()
 
391
        dummy_a._format.supports_full_versioned_files = True
523
392
        dummy_b = DummyRepository()
 
393
        dummy_b._format = RepositoryFormat()
 
394
        dummy_b._format.supports_full_versioned_files = True
524
395
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
525
396
 
526
397
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
530
401
        no actual sane default in the presence of incompatible data models.
531
402
        """
532
403
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
533
 
        self.assertEqual(repository.InterSameDataRepository,
 
404
        self.assertEqual(vf_repository.InterSameDataRepository,
534
405
                         inter_repo.__class__)
535
406
        self.assertEqual(repo_a, inter_repo.source)
536
407
        self.assertEqual(repo_b, inter_repo.target)
550
421
        dummy_a._serializer = repo._serializer
551
422
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
552
423
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
424
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
553
425
        dummy_b._serializer = repo._serializer
554
426
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
555
427
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
428
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
556
429
        repository.InterRepository.register_optimiser(InterDummy)
557
430
        try:
558
431
            # we should get the default for something InterDummy returns False
571
444
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
572
445
 
573
446
 
574
 
class TestInterWeaveRepo(TestCaseWithTransport):
575
 
 
576
 
    def test_is_compatible_and_registered(self):
577
 
        # InterWeaveRepo is compatible when either side
578
 
        # is a format 5/6/7 branch
579
 
        from bzrlib.repofmt import knitrepo, weaverepo
580
 
        formats = [weaverepo.RepositoryFormat5(),
581
 
                   weaverepo.RepositoryFormat6(),
582
 
                   weaverepo.RepositoryFormat7()]
583
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
584
 
                                knitrepo.RepositoryFormatKnit1(),
585
 
                                ]
586
 
        repo_a = self.make_repository('a')
587
 
        repo_b = self.make_repository('b')
588
 
        is_compatible = repository.InterWeaveRepo.is_compatible
589
 
        for source in incompatible_formats:
590
 
            # force incompatible left then right
591
 
            repo_a._format = source
592
 
            repo_b._format = formats[0]
593
 
            self.assertFalse(is_compatible(repo_a, repo_b))
594
 
            self.assertFalse(is_compatible(repo_b, repo_a))
595
 
        for source in formats:
596
 
            repo_a._format = source
597
 
            for target in formats:
598
 
                repo_b._format = target
599
 
                self.assertTrue(is_compatible(repo_a, repo_b))
600
 
        self.assertEqual(repository.InterWeaveRepo,
601
 
                         repository.InterRepository.get(repo_a,
602
 
                                                        repo_b).__class__)
 
447
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
448
 
 
449
    @classmethod
 
450
    def get_format_string(cls):
 
451
        return b"Test Format 1"
 
452
 
 
453
 
 
454
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
455
 
 
456
    @classmethod
 
457
    def get_format_string(cls):
 
458
        return b"Test Format 2"
603
459
 
604
460
 
605
461
class TestRepositoryConverter(TestCaseWithTransport):
606
462
 
607
463
    def test_convert_empty(self):
608
 
        t = get_transport(self.get_url('.'))
 
464
        source_format = TestRepositoryFormat1()
 
465
        target_format = TestRepositoryFormat2()
 
466
        repository.format_registry.register(source_format)
 
467
        self.addCleanup(repository.format_registry.remove,
 
468
            source_format)
 
469
        repository.format_registry.register(target_format)
 
470
        self.addCleanup(repository.format_registry.remove,
 
471
            target_format)
 
472
        t = self.get_transport()
609
473
        t.mkdir('repository')
610
474
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
611
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
612
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
475
        repo = TestRepositoryFormat1().initialize(repo_dir)
613
476
        converter = repository.CopyConverter(target_format)
614
 
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
615
 
        try:
 
477
        with breezy.ui.ui_factory.nested_progress_bar() as pb:
616
478
            converter.convert(repo, pb)
617
 
        finally:
618
 
            pb.finished()
619
479
        repo = repo_dir.open_repository()
620
480
        self.assertTrue(isinstance(target_format, repo._format.__class__))
621
481
 
622
482
 
623
 
class TestMisc(TestCase):
624
 
 
625
 
    def test_unescape_xml(self):
626
 
        """We get some kind of error when malformed entities are passed"""
627
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
628
 
 
629
 
 
630
483
class TestRepositoryFormatKnit3(TestCaseWithTransport):
631
484
 
632
485
    def test_attribute__fetch_order(self):
648
501
        format = bzrdir.BzrDirMetaFormat1()
649
502
        format.repository_format = knitrepo.RepositoryFormatKnit1()
650
503
        tree = self.make_branch_and_tree('.', format)
651
 
        tree.commit("Dull commit", rev_id="dull")
652
 
        revision_tree = tree.branch.repository.revision_tree('dull')
 
504
        tree.commit("Dull commit", rev_id=b"dull")
 
505
        revision_tree = tree.branch.repository.revision_tree(b'dull')
653
506
        revision_tree.lock_read()
654
507
        try:
655
508
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
656
 
                revision_tree.inventory.root.file_id)
 
509
                u'', revision_tree.get_root_id())
657
510
        finally:
658
511
            revision_tree.unlock()
659
512
        format = bzrdir.BzrDirMetaFormat1()
660
513
        format.repository_format = knitrepo.RepositoryFormatKnit3()
661
514
        upgrade.Convert('.', format)
662
515
        tree = workingtree.WorkingTree.open('.')
663
 
        revision_tree = tree.branch.repository.revision_tree('dull')
 
516
        revision_tree = tree.branch.repository.revision_tree(b'dull')
664
517
        revision_tree.lock_read()
665
518
        try:
666
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
519
            revision_tree.get_file_lines(u'', revision_tree.get_root_id())
667
520
        finally:
668
521
            revision_tree.unlock()
669
 
        tree.commit("Another dull commit", rev_id='dull2')
670
 
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
522
        tree.commit("Another dull commit", rev_id=b'dull2')
 
523
        revision_tree = tree.branch.repository.revision_tree(b'dull2')
671
524
        revision_tree.lock_read()
672
525
        self.addCleanup(revision_tree.unlock)
673
 
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
526
        self.assertEqual('dull',
 
527
                revision_tree.get_file_revision(u'', revision_tree.get_root_id()))
674
528
 
675
529
    def test_supports_external_lookups(self):
676
530
        format = bzrdir.BzrDirMetaFormat1()
681
535
 
682
536
class Test2a(tests.TestCaseWithMemoryTransport):
683
537
 
684
 
    def test_fetch_combines_groups(self):
685
 
        builder = self.make_branch_builder('source', format='2a')
686
 
        builder.start_series()
687
 
        builder.build_snapshot('1', None, [
688
 
            ('add', ('', 'root-id', 'directory', '')),
689
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
690
 
        builder.build_snapshot('2', ['1'], [
691
 
            ('modify', ('file-id', 'content-2\n'))])
692
 
        builder.finish_series()
693
 
        source = builder.get_branch()
694
 
        target = self.make_repository('target', format='2a')
695
 
        target.fetch(source.repository)
696
 
        target.lock_read()
697
 
        self.addCleanup(target.unlock)
698
 
        details = target.texts._index.get_build_details(
699
 
            [('file-id', '1',), ('file-id', '2',)])
700
 
        file_1_details = details[('file-id', '1')]
701
 
        file_2_details = details[('file-id', '2')]
702
 
        # The index, and what to read off disk, should be the same for both
703
 
        # versions of the file.
704
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
705
 
 
706
 
    def test_fetch_combines_groups(self):
707
 
        builder = self.make_branch_builder('source', format='2a')
708
 
        builder.start_series()
709
 
        builder.build_snapshot('1', None, [
710
 
            ('add', ('', 'root-id', 'directory', '')),
711
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
712
 
        builder.build_snapshot('2', ['1'], [
713
 
            ('modify', ('file-id', 'content-2\n'))])
714
 
        builder.finish_series()
715
 
        source = builder.get_branch()
716
 
        target = self.make_repository('target', format='2a')
717
 
        target.fetch(source.repository)
718
 
        target.lock_read()
719
 
        self.addCleanup(target.unlock)
720
 
        details = target.texts._index.get_build_details(
721
 
            [('file-id', '1',), ('file-id', '2',)])
722
 
        file_1_details = details[('file-id', '1')]
723
 
        file_2_details = details[('file-id', '2')]
724
 
        # The index, and what to read off disk, should be the same for both
725
 
        # versions of the file.
726
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
727
 
 
728
 
    def test_fetch_combines_groups(self):
729
 
        builder = self.make_branch_builder('source', format='2a')
730
 
        builder.start_series()
731
 
        builder.build_snapshot('1', None, [
732
 
            ('add', ('', 'root-id', 'directory', '')),
733
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
734
 
        builder.build_snapshot('2', ['1'], [
735
 
            ('modify', ('file-id', 'content-2\n'))])
736
 
        builder.finish_series()
737
 
        source = builder.get_branch()
738
 
        target = self.make_repository('target', format='2a')
739
 
        target.fetch(source.repository)
740
 
        target.lock_read()
741
 
        self.addCleanup(target.unlock)
742
 
        details = target.texts._index.get_build_details(
743
 
            [('file-id', '1',), ('file-id', '2',)])
744
 
        file_1_details = details[('file-id', '1')]
745
 
        file_2_details = details[('file-id', '2')]
 
538
    def test_chk_bytes_uses_custom_btree_parser(self):
 
539
        mt = self.make_branch_and_memory_tree('test', format='2a')
 
540
        mt.lock_write()
 
541
        self.addCleanup(mt.unlock)
 
542
        mt.add([''], [b'root-id'])
 
543
        mt.commit('first')
 
544
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
 
545
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
546
        # It should also work if we re-open the repo
 
547
        repo = mt.branch.repository.controldir.open_repository()
 
548
        repo.lock_read()
 
549
        self.addCleanup(repo.unlock)
 
550
        index = repo.chk_bytes._index._graph_index._indices[0]
 
551
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
552
 
 
553
    def test_fetch_combines_groups(self):
 
554
        builder = self.make_branch_builder('source', format='2a')
 
555
        builder.start_series()
 
556
        builder.build_snapshot(None, [
 
557
            ('add', ('', b'root-id', 'directory', '')),
 
558
            ('add', ('file', b'file-id', 'file', b'content\n'))],
 
559
            revision_id=b'1')
 
560
        builder.build_snapshot([b'1'], [
 
561
            ('modify', ('file', b'content-2\n'))],
 
562
            revision_id=b'2')
 
563
        builder.finish_series()
 
564
        source = builder.get_branch()
 
565
        target = self.make_repository('target', format='2a')
 
566
        target.fetch(source.repository)
 
567
        target.lock_read()
 
568
        self.addCleanup(target.unlock)
 
569
        details = target.texts._index.get_build_details(
 
570
            [(b'file-id', b'1',), (b'file-id', b'2',)])
 
571
        file_1_details = details[(b'file-id', b'1')]
 
572
        file_2_details = details[(b'file-id', b'2')]
 
573
        # The index, and what to read off disk, should be the same for both
 
574
        # versions of the file.
 
575
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
576
 
 
577
    def test_fetch_combines_groups(self):
 
578
        builder = self.make_branch_builder('source', format='2a')
 
579
        builder.start_series()
 
580
        builder.build_snapshot(None, [
 
581
            ('add', ('', b'root-id', 'directory', '')),
 
582
            ('add', ('file', b'file-id', 'file', 'content\n'))],
 
583
            revision_id=b'1')
 
584
        builder.build_snapshot([b'1'], [
 
585
            ('modify', ('file', b'content-2\n'))],
 
586
            revision_id=b'2')
 
587
        builder.finish_series()
 
588
        source = builder.get_branch()
 
589
        target = self.make_repository('target', format='2a')
 
590
        target.fetch(source.repository)
 
591
        target.lock_read()
 
592
        self.addCleanup(target.unlock)
 
593
        details = target.texts._index.get_build_details(
 
594
            [(b'file-id', b'1',), (b'file-id', b'2',)])
 
595
        file_1_details = details[(b'file-id', b'1')]
 
596
        file_2_details = details[(b'file-id', b'2')]
 
597
        # The index, and what to read off disk, should be the same for both
 
598
        # versions of the file.
 
599
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
600
 
 
601
    def test_fetch_combines_groups(self):
 
602
        builder = self.make_branch_builder('source', format='2a')
 
603
        builder.start_series()
 
604
        builder.build_snapshot(None, [
 
605
            ('add', ('', b'root-id', 'directory', '')),
 
606
            ('add', ('file', b'file-id', 'file', 'content\n'))],
 
607
            revision_id=b'1')
 
608
        builder.build_snapshot([b'1'], [
 
609
            ('modify', ('file', b'content-2\n'))],
 
610
            revision_id=b'2')
 
611
        builder.finish_series()
 
612
        source = builder.get_branch()
 
613
        target = self.make_repository('target', format='2a')
 
614
        target.fetch(source.repository)
 
615
        target.lock_read()
 
616
        self.addCleanup(target.unlock)
 
617
        details = target.texts._index.get_build_details(
 
618
            [(b'file-id', b'1',), (b'file-id', b'2',)])
 
619
        file_1_details = details[(b'file-id', b'1')]
 
620
        file_2_details = details[(b'file-id', b'2')]
746
621
        # The index, and what to read off disk, should be the same for both
747
622
        # versions of the file.
748
623
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
754
629
    def test_inventories_use_chk_map_with_parent_base_dict(self):
755
630
        tree = self.make_branch_and_memory_tree('repo', format="2a")
756
631
        tree.lock_write()
757
 
        tree.add([''], ['TREE_ROOT'])
 
632
        tree.add([''], [b'TREE_ROOT'])
758
633
        revid = tree.commit("foo")
759
634
        tree.unlock()
760
635
        tree.lock_read()
775
650
        tree = self.make_branch_and_memory_tree('tree', format='2a')
776
651
        tree.lock_write()
777
652
        self.addCleanup(tree.unlock)
778
 
        tree.add([''], ['TREE_ROOT'])
 
653
        tree.add([''], [b'TREE_ROOT'])
779
654
        for pos in range(20):
780
655
            tree.commit(str(pos))
781
656
 
783
658
        tree = self.make_branch_and_memory_tree('tree', format='2a')
784
659
        tree.lock_write()
785
660
        self.addCleanup(tree.unlock)
786
 
        tree.add([''], ['TREE_ROOT'])
 
661
        tree.add([''], [b'TREE_ROOT'])
787
662
        # 1 commit to leave untouched
788
663
        tree.commit('1')
789
664
        to_keep = tree.branch.repository._pack_collection.names()
812
687
        target = self.make_repository('target', format='rich-root-pack')
813
688
        stream = source._get_source(target._format)
814
689
        # We don't want the child GroupCHKStreamSource
815
 
        self.assertIs(type(stream), repository.StreamSource)
 
690
        self.assertIs(type(stream), vf_repository.StreamSource)
816
691
 
817
692
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
818
693
        source_builder = self.make_branch_builder('source',
819
694
                            format='2a')
820
695
        # We have to build a fairly large tree, so that we are sure the chk
821
696
        # pages will have split into multiple pages.
822
 
        entries = [('add', ('', 'a-root-id', 'directory', None))]
 
697
        entries = [('add', ('', b'a-root-id', 'directory', None))]
823
698
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
824
699
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
825
700
                fname = i + j
826
 
                fid = fname + '-id'
 
701
                fid = fname.encode('utf-8') + b'-id'
827
702
                content = 'content for %s\n' % (fname,)
828
703
                entries.append(('add', (fname, fid, 'file', content)))
829
704
        source_builder.start_series()
830
 
        source_builder.build_snapshot('rev-1', None, entries)
 
705
        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
831
706
        # Now change a few of them, so we get a few new pages for the second
832
707
        # revision
833
 
        source_builder.build_snapshot('rev-2', ['rev-1'], [
834
 
            ('modify', ('aa-id', 'new content for aa-id\n')),
835
 
            ('modify', ('cc-id', 'new content for cc-id\n')),
836
 
            ('modify', ('zz-id', 'new content for zz-id\n')),
837
 
            ])
 
708
        source_builder.build_snapshot([b'rev-1'], [
 
709
            ('modify', ('aa', b'new content for aa-id\n')),
 
710
            ('modify', ('cc', b'new content for cc-id\n')),
 
711
            ('modify', ('zz', b'new content for zz-id\n')),
 
712
            ], revision_id=b'rev-2')
838
713
        source_builder.finish_series()
839
714
        source_branch = source_builder.get_branch()
840
715
        source_branch.lock_read()
845
720
 
846
721
        # On a regular pass, getting the inventories and chk pages for rev-2
847
722
        # would only get the newly created chk pages
848
 
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
849
 
                                    set(['rev-2']))
 
723
        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
 
724
                                    {b'rev-2'})
850
725
        simple_chk_records = []
851
726
        for vf_name, substream in source.get_stream(search):
852
727
            if vf_name == 'chk_bytes':
894
769
        source = self.make_repository('source', format='pack-0.92')
895
770
        target = self.make_repository('target', format='pack-0.92')
896
771
        stream_source = source._get_source(target._format)
897
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
772
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
898
773
 
899
774
    def test_source_to_exact_pack_rich_root_pack(self):
900
775
        source = self.make_repository('source', format='rich-root-pack')
901
776
        target = self.make_repository('target', format='rich-root-pack')
902
777
        stream_source = source._get_source(target._format)
903
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
778
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
904
779
 
905
780
    def test_source_to_exact_pack_19(self):
906
781
        source = self.make_repository('source', format='1.9')
907
782
        target = self.make_repository('target', format='1.9')
908
783
        stream_source = source._get_source(target._format)
909
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
784
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
910
785
 
911
786
    def test_source_to_exact_pack_19_rich_root(self):
912
787
        source = self.make_repository('source', format='1.9-rich-root')
913
788
        target = self.make_repository('target', format='1.9-rich-root')
914
789
        stream_source = source._get_source(target._format)
915
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
790
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
916
791
 
917
792
    def test_source_to_remote_exact_pack_19(self):
918
793
        trans = self.make_smart_server('target')
921
796
        target = self.make_repository('target', format='1.9')
922
797
        target = repository.Repository.open(trans.base)
923
798
        stream_source = source._get_source(target._format)
924
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
799
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
925
800
 
926
801
    def test_stream_source_to_non_exact(self):
927
802
        source = self.make_repository('source', format='pack-0.92')
928
803
        target = self.make_repository('target', format='1.9')
929
804
        stream = source._get_source(target._format)
930
 
        self.assertIs(type(stream), repository.StreamSource)
 
805
        self.assertIs(type(stream), vf_repository.StreamSource)
931
806
 
932
807
    def test_stream_source_to_non_exact_rich_root(self):
933
808
        source = self.make_repository('source', format='1.9')
934
809
        target = self.make_repository('target', format='1.9-rich-root')
935
810
        stream = source._get_source(target._format)
936
 
        self.assertIs(type(stream), repository.StreamSource)
 
811
        self.assertIs(type(stream), vf_repository.StreamSource)
937
812
 
938
813
    def test_source_to_remote_non_exact_pack_19(self):
939
814
        trans = self.make_smart_server('target')
942
817
        target = self.make_repository('target', format='1.6')
943
818
        target = repository.Repository.open(trans.base)
944
819
        stream_source = source._get_source(target._format)
945
 
        self.assertIs(type(stream_source), repository.StreamSource)
 
820
        self.assertIs(type(stream_source), vf_repository.StreamSource)
946
821
 
947
822
    def test_stream_source_to_knit(self):
948
823
        source = self.make_repository('source', format='pack-0.92')
949
824
        target = self.make_repository('target', format='dirstate')
950
825
        stream = source._get_source(target._format)
951
 
        self.assertIs(type(stream), repository.StreamSource)
 
826
        self.assertIs(type(stream), vf_repository.StreamSource)
952
827
 
953
828
 
954
829
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
956
831
 
957
832
    def setUp(self):
958
833
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
959
 
        self.builder = self.make_branch_builder('source',
960
 
            format='development6-rich-root')
 
834
        self.builder = self.make_branch_builder('source')
961
835
        self.builder.start_series()
962
 
        self.builder.build_snapshot('initial', None,
963
 
            [('add', ('', 'tree-root', 'directory', None))])
 
836
        self.builder.build_snapshot(None,
 
837
            [('add', ('', b'tree-root', 'directory', None))],
 
838
            revision_id=b'initial')
964
839
        self.repo = self.builder.get_branch().repository
965
840
        self.addCleanup(self.builder.finish_series)
966
841
 
969
844
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
970
845
 
971
846
    def test_simple(self):
972
 
        self.builder.build_snapshot('revid1', None, [])
973
 
        self.builder.build_snapshot('revid2', ['revid1'], [])
974
 
        rev_set = ['revid2']
975
 
        self.assertParentIds(['revid1'], rev_set)
 
847
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
 
848
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
 
849
        rev_set = [b'revid2']
 
850
        self.assertParentIds([b'revid1'], rev_set)
976
851
 
977
852
    def test_not_first_parent(self):
978
 
        self.builder.build_snapshot('revid1', None, [])
979
 
        self.builder.build_snapshot('revid2', ['revid1'], [])
980
 
        self.builder.build_snapshot('revid3', ['revid2'], [])
981
 
        rev_set = ['revid3', 'revid2']
982
 
        self.assertParentIds(['revid1'], rev_set)
 
853
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
 
854
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
 
855
        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
 
856
        rev_set = [b'revid3', b'revid2']
 
857
        self.assertParentIds([b'revid1'], rev_set)
983
858
 
984
859
    def test_not_null(self):
985
 
        rev_set = ['initial']
 
860
        rev_set = [b'initial']
986
861
        self.assertParentIds([], rev_set)
987
862
 
988
863
    def test_not_null_set(self):
989
 
        self.builder.build_snapshot('revid1', None, [])
 
864
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
990
865
        rev_set = [_mod_revision.NULL_REVISION]
991
866
        self.assertParentIds([], rev_set)
992
867
 
993
868
    def test_ghost(self):
994
 
        self.builder.build_snapshot('revid1', None, [])
995
 
        rev_set = ['ghost', 'revid1']
996
 
        self.assertParentIds(['initial'], rev_set)
 
869
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
 
870
        rev_set = [b'ghost', b'revid1']
 
871
        self.assertParentIds([b'initial'], rev_set)
997
872
 
998
873
    def test_ghost_parent(self):
999
 
        self.builder.build_snapshot('revid1', None, [])
1000
 
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
1001
 
        rev_set = ['revid2', 'revid1']
1002
 
        self.assertParentIds(['ghost', 'initial'], rev_set)
 
874
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
 
875
        self.builder.build_snapshot([b'revid1', b'ghost'], [], revision_id=b'revid2')
 
876
        rev_set = [b'revid2', b'revid1']
 
877
        self.assertParentIds([b'ghost', b'initial'], rev_set)
1003
878
 
1004
879
    def test_righthand_parent(self):
1005
 
        self.builder.build_snapshot('revid1', None, [])
1006
 
        self.builder.build_snapshot('revid2a', ['revid1'], [])
1007
 
        self.builder.build_snapshot('revid2b', ['revid1'], [])
1008
 
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
1009
 
        rev_set = ['revid3', 'revid2a']
1010
 
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
880
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
 
881
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
 
882
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
 
883
        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
 
884
                                    revision_id=b'revid3')
 
885
        rev_set = [b'revid3', b'revid2a']
 
886
        self.assertParentIds([b'revid1', b'revid2b'], rev_set)
1011
887
 
1012
888
 
1013
889
class TestWithBrokenRepo(TestCaseWithTransport):
1025
901
            repo.start_write_group()
1026
902
            cleanups.append(repo.commit_write_group)
1027
903
            # make rev1a: A well-formed revision, containing 'file1'
1028
 
            inv = inventory.Inventory(revision_id='rev1a')
1029
 
            inv.root.revision = 'rev1a'
1030
 
            self.add_file(repo, inv, 'file1', 'rev1a', [])
1031
 
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
1032
 
            repo.add_inventory('rev1a', inv, [])
1033
 
            revision = _mod_revision.Revision('rev1a',
 
904
            inv = inventory.Inventory(revision_id=b'rev1a')
 
905
            inv.root.revision = b'rev1a'
 
906
            self.add_file(repo, inv, 'file1', b'rev1a', [])
 
907
            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
 
908
            repo.add_inventory(b'rev1a', inv, [])
 
909
            revision = _mod_revision.Revision(b'rev1a',
1034
910
                committer='jrandom@example.com', timestamp=0,
1035
911
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
1036
 
            repo.add_revision('rev1a',revision, inv)
 
912
            repo.add_revision(b'rev1a', revision, inv)
1037
913
 
1038
914
            # make rev1b, which has no Revision, but has an Inventory, and
1039
915
            # file1
1040
 
            inv = inventory.Inventory(revision_id='rev1b')
1041
 
            inv.root.revision = 'rev1b'
1042
 
            self.add_file(repo, inv, 'file1', 'rev1b', [])
1043
 
            repo.add_inventory('rev1b', inv, [])
 
916
            inv = inventory.Inventory(revision_id=b'rev1b')
 
917
            inv.root.revision = b'rev1b'
 
918
            self.add_file(repo, inv, 'file1', b'rev1b', [])
 
919
            repo.add_inventory(b'rev1b', inv, [])
1044
920
 
1045
921
            # make rev2, with file1 and file2
1046
922
            # file2 is sane
1047
923
            # file1 has 'rev1b' as an ancestor, even though this is not
1048
924
            # mentioned by 'rev1a', making it an unreferenced ancestor
1049
925
            inv = inventory.Inventory()
1050
 
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
1051
 
            self.add_file(repo, inv, 'file2', 'rev2', [])
1052
 
            self.add_revision(repo, 'rev2', inv, ['rev1a'])
 
926
            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
 
927
            self.add_file(repo, inv, 'file2', b'rev2', [])
 
928
            self.add_revision(repo, b'rev2', inv, [b'rev1a'])
1053
929
 
1054
930
            # make ghost revision rev1c
1055
931
            inv = inventory.Inventory()
1056
 
            self.add_file(repo, inv, 'file2', 'rev1c', [])
 
932
            self.add_file(repo, inv, 'file2', b'rev1c', [])
1057
933
 
1058
934
            # make rev3 with file2
1059
935
            # file2 refers to 'rev1c', which is a ghost in this repository, so
1060
936
            # file2 cannot have rev1c as its ancestor.
1061
937
            inv = inventory.Inventory()
1062
 
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
1063
 
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
 
938
            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
 
939
            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
1064
940
            return repo
1065
941
        finally:
1066
942
            for cleanup in reversed(cleanups):
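Editor's note: the comments in this fixture describe two distinct kinds of breakage. rev2's file1 text names b'rev1b' as a parent even though rev2's Revision never mentions it (an unreferenced ancestor), and rev3's file2 text names b'rev1c', which does not exist in the repository at all (a ghost). A rough standalone sketch of that classification, using plain dicts rather than Breezy's check/reconcile machinery (the function and argument names below are illustrative only):

    def find_text_parent_problems(text_parents, declared_parents, present_revisions):
        """Classify suspicious text parents (illustrative, not Breezy's checker).

        text_parents: {(file_id, revision): [text parent revisions]}
        declared_parents: {revision: [parent ids named by that revision's Revision]}
        present_revisions: set of revision ids that actually exist in the repository
        """
        unreferenced = set()
        ghosts = set()
        for (file_id, revision), parents in text_parents.items():
            declared = set(declared_parents.get(revision, []))
            for parent in parents:
                if parent not in declared:
                    # e.g. file1 in rev2 naming rev1b, which rev2 never mentions
                    unreferenced.add((file_id, revision, parent))
                elif parent not in present_revisions:
                    # e.g. file2 in rev3 naming the ghost rev1c
                    ghosts.add((file_id, revision, parent))
        return unreferenced, ghosts

    unreferenced, ghosts = find_text_parent_problems(
        {(b'file1-id', b'rev2'): [b'rev1a', b'rev1b'],
         (b'file2-id', b'rev3'): [b'rev1c']},
        declared_parents={b'rev2': [b'rev1a'], b'rev3': [b'rev1c']},
        present_revisions={b'rev1a', b'rev2', b'rev3'})
    assert unreferenced == {(b'file1-id', b'rev2', b'rev1b')}
    assert ghosts == {(b'file2-id', b'rev3', b'rev1c')}
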
1074
950
        revision = _mod_revision.Revision(revision_id,
1075
951
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
1076
952
            timezone=0, message='foo', parent_ids=parent_ids)
1077
 
        repo.add_revision(revision_id,revision, inv)
 
953
        repo.add_revision(revision_id, revision, inv)
1078
954
 
1079
955
    def add_file(self, repo, inv, filename, revision, parents):
1080
 
        file_id = filename + '-id'
1081
 
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
 
956
        file_id = filename + b'-id'
 
957
        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
1082
958
        entry.revision = revision
1083
959
        entry.text_size = 0
1084
960
        inv.add(entry)
1085
961
        text_key = (file_id, revision)
1086
962
        parent_keys = [(file_id, parent) for parent in parents]
1087
 
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
 
963
        repo.texts.add_lines(text_key, parent_keys, [b'line\n'])
1088
964
 
1089
965
    def test_insert_from_broken_repo(self):
1090
966
        """Inserting a data stream from a broken repository won't silently
1100
976
            return
1101
977
        empty_repo.lock_read()
1102
978
        self.addCleanup(empty_repo.unlock)
1103
 
        text = empty_repo.texts.get_record_stream(
1104
 
            [('file2-id', 'rev3')], 'topological', True).next()
1105
 
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
979
        text = next(empty_repo.texts.get_record_stream(
 
980
            [(b'file2-id', b'rev3')], 'topological', True))
 
981
        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))
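Editor's note: the hunk above makes two Python 3 adjustments at once. The record stream is consumed with the builtin next() instead of the Python 2-only .next() method, and the expected fulltext becomes a bytes literal. The same pattern in miniature, with no Breezy APIs involved:

    records = iter([b'line\n', b'other\n'])
    first = next(records)       # builtin next() works on Python 2.6+ and 3
    # first = records.next()    # old spelling: Python 2 only, removed in Python 3
    assert first == b'line\n'
    assert first.decode('utf-8') == 'line\n'   # bytes vs. text is now explicit
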
1106
982
 
1107
983
 
1108
984
class TestRepositoryPackCollection(TestCaseWithTransport):
1109
985
 
1110
986
    def get_format(self):
1111
 
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
987
        return controldir.format_registry.make_controldir('pack-0.92')
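Editor's note: get_format now resolves the pack-0.92 format through breezy.controldir's registry rather than the removed bzrdir helpers. Outside the test fixture the same lookup is simply the following (a minimal sketch reusing only the call shown above):

    from breezy import controldir

    # 'pack-0.92' is the registered alias this test fixture already relies on.
    fmt = controldir.format_registry.make_controldir('pack-0.92')
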
1112
988
 
1113
989
    def get_packs(self):
1114
990
        format = self.get_format()
1153
1029
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1154
1030
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1155
1031
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1156
 
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1032
        res = packs._clear_obsolete_packs(preserve={'a-pack'})
1157
1033
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1158
1034
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1159
1035
                         sorted(obsolete_pack_trans.list_dir('.')))
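Editor's note: several hunks in this file trade set([...]) calls for set literals and set comprehensions. The meaning is unchanged, but no throwaway list is built and the intent reads more directly. A standalone illustration of both spellings (rsplit stands in here for osutils.splitext):

    names = ['a-pack.iix', 'a-pack.pack', 'another-pack.pack']
    assert set(['a-pack']) == {'a-pack'}
    assert (set([n.rsplit('.', 1)[0] for n in names])
            == {n.rsplit('.', 1)[0] for n in names}
            == {'a-pack', 'another-pack'})
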
1207
1083
                         sorted(packs._pack_transport.list_dir('.')))
1208
1084
        # names[0] should not be present in the index anymore
1209
1085
        self.assertEqual(names[1:],
1210
 
            sorted(set([osutils.splitext(n)[0] for n in
1211
 
                        packs._index_transport.list_dir('.')])))
 
1086
            sorted({osutils.splitext(n)[0] for n in
 
1087
                        packs._index_transport.list_dir('.')}))
 
1088
 
 
1089
    def test__obsolete_packs_missing_directory(self):
 
1090
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1091
        r.control_transport.rmdir('obsolete_packs')
 
1092
        names = packs.names()
 
1093
        pack = packs.get_pack_by_name(names[0])
 
1094
        # Schedule this one for removal
 
1095
        packs._remove_pack_from_memory(pack)
 
1096
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1097
        # are still renamed
 
1098
        packs._obsolete_packs([pack])
 
1099
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1100
                         sorted(packs._pack_transport.list_dir('.')))
 
1101
        # names[0] should not be present in the index anymore
 
1102
        self.assertEqual(names[1:],
 
1103
            sorted({osutils.splitext(n)[0] for n in
 
1104
                        packs._index_transport.list_dir('.')}))
1212
1105
 
1213
1106
    def test_pack_distribution_zero(self):
1214
1107
        packs = self.get_packs()
1390
1283
        # and remove another pack (via _remove_pack_from_memory)
1391
1284
        orig_names = packs.names()
1392
1285
        orig_at_load = packs._packs_at_load
1393
 
        to_remove_name = iter(orig_names).next()
 
1286
        to_remove_name = next(iter(orig_names))
1394
1287
        r.start_write_group()
1395
1288
        self.addCleanup(r.abort_write_group)
1396
1289
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1404
1297
        packs._remove_pack_from_memory(removed_pack)
1405
1298
        names = packs.names()
1406
1299
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1407
 
        new_names = set([x[0][0] for x in new_nodes])
 
1300
        new_names = {x[0][0] for x in new_nodes}
1408
1301
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1409
1302
        self.assertEqual(set(names) - set(orig_names), new_names)
1410
 
        self.assertEqual(set([new_pack.name]), new_names)
 
1303
        self.assertEqual({new_pack.name}, new_names)
1411
1304
        self.assertEqual([to_remove_name],
1412
1305
                         sorted([x[0][0] for x in deleted_nodes]))
1413
1306
        packs.reload_pack_names()
1415
1308
        self.assertEqual(orig_at_load, packs._packs_at_load)
1416
1309
        self.assertEqual(names, reloaded_names)
1417
1310
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1418
 
        new_names = set([x[0][0] for x in new_nodes])
 
1311
        new_names = {x[0][0] for x in new_nodes}
1419
1312
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1420
1313
        self.assertEqual(set(names) - set(orig_names), new_names)
1421
 
        self.assertEqual(set([new_pack.name]), new_names)
 
1314
        self.assertEqual({new_pack.name}, new_names)
1422
1315
        self.assertEqual([to_remove_name],
1423
1316
                         sorted([x[0][0] for x in deleted_nodes]))
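Editor's note: the _diff_pack_names assertions above reduce to set arithmetic between the names recorded at load time and the names currently on disk: the freshly written pack shows up as the only new name and the removed one as the only deleted name. The same bookkeeping in plain Python (the pack names here are made up for illustration):

    at_load = {'pack-1', 'pack-2', 'pack-3'}
    on_disk = {'pack-2', 'pack-3', 'pack-4'}
    new_names = on_disk - at_load
    deleted_names = at_load - on_disk
    assert new_names == {'pack-4'}
    assert deleted_names == {'pack-1'}
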
1424
1317
 
1464
1357
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1465
1358
        # obsolete_packs will also have stuff like .rix and .iix present.
1466
1359
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1467
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1360
        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
1468
1361
        self.assertEqual([pack.name], sorted(obsolete_names))
1469
1362
 
1470
1363
    def test__save_pack_names_already_obsoleted(self):
1482
1375
        # Note that while we set clear_obsolete_packs=True, it should not
1483
1376
        # delete a pack file that we have also scheduled for obsoletion.
1484
1377
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1485
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1378
        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
1486
1379
        self.assertEqual([pack.name], sorted(obsolete_names))
1487
1380
 
 
1381
    def test_pack_no_obsolete_packs_directory(self):
 
1382
        """Bug #314314, don't fail if obsolete_packs directory does
 
1383
        not exist."""
 
1384
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1385
        r.control_transport.rmdir('obsolete_packs')
 
1386
        packs._clear_obsolete_packs()
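Editor's note: test_pack_no_obsolete_packs_directory covers bug #314314: clearing obsolete packs has to tolerate the obsolete_packs directory having been removed. A rough sketch of that defensive pattern using os instead of Breezy's transport layer (the function and argument names are illustrative, not the real implementation):

    import errno
    import os

    def clear_obsolete_packs(obsolete_dir, preserve=frozenset()):
        """Delete files under obsolete_dir, tolerating a missing directory."""
        try:
            entries = os.listdir(obsolete_dir)
        except OSError as e:
            if e.errno == errno.ENOENT:
                return []             # nothing to clear: the directory is gone
            raise
        removed = []
        for name in entries:
            stem = name.rsplit('.', 1)[0]
            if stem in preserve:
                continue
            os.unlink(os.path.join(obsolete_dir, name))
            removed.append(stem)
        return sorted(set(removed))
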
1488
1387
 
1489
1388
 
1490
1389
class TestPack(TestCaseWithTransport):
1574
1473
    def test_pack_optimizes_pack_order(self):
1575
1474
        builder = self.make_branch_builder('.', format="1.9")
1576
1475
        builder.start_series()
1577
 
        builder.build_snapshot('A', None, [
1578
 
            ('add', ('', 'root-id', 'directory', None)),
1579
 
            ('add', ('f', 'f-id', 'file', 'content\n'))])
1580
 
        builder.build_snapshot('B', ['A'],
1581
 
            [('modify', ('f-id', 'new-content\n'))])
1582
 
        builder.build_snapshot('C', ['B'],
1583
 
            [('modify', ('f-id', 'third-content\n'))])
1584
 
        builder.build_snapshot('D', ['C'],
1585
 
            [('modify', ('f-id', 'fourth-content\n'))])
 
1476
        builder.build_snapshot(None, [
 
1477
            ('add', ('', b'root-id', 'directory', None)),
 
1478
            ('add', ('f', b'f-id', 'file', b'content\n'))],
 
1479
            revision_id=b'A')
 
1480
        builder.build_snapshot([b'A'],
 
1481
            [('modify', ('f', b'new-content\n'))],
 
1482
            revision_id=b'B')
 
1483
        builder.build_snapshot([b'B'],
 
1484
            [('modify', ('f', b'third-content\n'))],
 
1485
            revision_id=b'C')
 
1486
        builder.build_snapshot([b'C'],
 
1487
            [('modify', ('f', b'fourth-content\n'))],
 
1488
            revision_id=b'D')
1586
1489
        b = builder.get_branch()
1587
1490
        b.lock_read()
1588
1491
        builder.finish_series()
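Editor's note: the rewritten calls above follow BranchBuilder.build_snapshot's newer signature: the parent list comes first (None for the first commit), the revision id is passed as a bytes keyword argument, and 'modify' actions name the path rather than a file id. Condensed, inside the same kind of TestCaseWithTransport fixture used throughout this file:

    builder = self.make_branch_builder('demo', format="1.9")
    builder.start_series()
    builder.build_snapshot(None, [
        ('add', ('', b'root-id', 'directory', None)),
        ('add', ('f', b'f-id', 'file', b'content\n'))],
        revision_id=b'A')
    builder.build_snapshot([b'A'],
        [('modify', ('f', b'new-content\n'))],
        revision_id=b'B')
    builder.finish_series()
    branch = builder.get_branch()
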
1591
1494
        # Because of how they were built, they correspond to
1592
1495
        # ['D', 'C', 'B', 'A']
1593
1496
        packs = b.repository._pack_collection.packs
1594
 
        packer = pack_repo.Packer(b.repository._pack_collection,
 
1497
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
1595
1498
                                  packs, 'testing',
1596
 
                                  revision_ids=['B', 'C'])
 
1499
                                  revision_ids=[b'B', b'C'])
1597
1500
        # Now, when we are copying the B & C revisions, their pack files should
1598
1501
        # be moved to the front of the stack
1599
1502
        # The new ordering moves B & C to the front of the .packs attribute,
1611
1514
        return repo._pack_collection
1612
1515
 
1613
1516
    def test_open_pack_will_optimise(self):
1614
 
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
 
1517
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
1615
1518
                                            [], '.test')
1616
1519
        new_pack = packer.open_pack()
1617
1520
        self.addCleanup(new_pack.abort) # ensure cleanup
1622
1525
        self.assertTrue(new_pack.signature_index._optimize_for_size)
1623
1526
 
1624
1527
 
 
1528
class TestGCCHKPacker(TestCaseWithTransport):
 
1529
 
 
1530
    def make_abc_branch(self):
 
1531
        builder = self.make_branch_builder('source')
 
1532
        builder.start_series()
 
1533
        builder.build_snapshot(None, [
 
1534
            ('add', ('', b'root-id', 'directory', None)),
 
1535
            ('add', ('file', b'file-id', 'file', b'content\n')),
 
1536
            ], revision_id=b'A')
 
1537
        builder.build_snapshot([b'A'], [
 
1538
            ('add', ('dir', b'dir-id', 'directory', None))],
 
1539
            revision_id=b'B')
 
1540
        builder.build_snapshot([b'B'], [
 
1541
            ('modify', ('file', b'new content\n'))],
 
1542
            revision_id=b'C')
 
1543
        builder.finish_series()
 
1544
        return builder.get_branch()
 
1545
 
 
1546
    def make_branch_with_disjoint_inventory_and_revision(self):
 
1547
        """a repo with separate packs for a revisions Revision and Inventory.
 
1548
 
 
1549
        There will be one pack file that holds the Revision content, and one
 
1550
        for the Inventory content.
 
1551
 
 
1552
        :return: (repository,
 
1553
                  pack_name_with_rev_A_Revision,
 
1554
                  pack_name_with_rev_A_Inventory,
 
1555
                  pack_name_with_rev_C_content)
 
1556
        """
 
1557
        b_source = self.make_abc_branch()
 
1558
        b_base = b_source.controldir.sprout('base', revision_id=b'A').open_branch()
 
1559
        b_stacked = b_base.controldir.sprout('stacked', stacked=True).open_branch()
 
1560
        b_stacked.lock_write()
 
1561
        self.addCleanup(b_stacked.unlock)
 
1562
        b_stacked.fetch(b_source, 'B')
 
1563
        # Now re-open the stacked repo directly (no fallbacks) so that we can
 
1564
        # fill in the A rev.
 
1565
        repo_not_stacked = b_stacked.controldir.open_repository()
 
1566
        repo_not_stacked.lock_write()
 
1567
        self.addCleanup(repo_not_stacked.unlock)
 
1568
        # Now we should have a pack file with A's inventory, but not its
 
1569
        # Revision
 
1570
        self.assertEqual([('A',), ('B',)],
 
1571
                         sorted(repo_not_stacked.inventories.keys()))
 
1572
        self.assertEqual([('B',)],
 
1573
                         sorted(repo_not_stacked.revisions.keys()))
 
1574
        stacked_pack_names = repo_not_stacked._pack_collection.names()
 
1575
        # We have a couple names here, figure out which has A's inventory
 
1576
        for name in stacked_pack_names:
 
1577
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
 
1578
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
 
1579
            if ('A',) in keys:
 
1580
                inv_a_pack_name = name
 
1581
                break
 
1582
        else:
 
1583
            self.fail('Could not find pack containing A\'s inventory')
 
1584
        repo_not_stacked.fetch(b_source.repository, 'A')
 
1585
        self.assertEqual([('A',), ('B',)],
 
1586
                         sorted(repo_not_stacked.revisions.keys()))
 
1587
        new_pack_names = set(repo_not_stacked._pack_collection.names())
 
1588
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
 
1589
        self.assertEqual(1, len(rev_a_pack_names))
 
1590
        rev_a_pack_name = list(rev_a_pack_names)[0]
 
1591
        # Now fetch 'C', so we have a couple pack files to join
 
1592
        repo_not_stacked.fetch(b_source.repository, 'C')
 
1593
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
 
1594
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
 
1595
        self.assertEqual(1, len(rev_c_pack_names))
 
1596
        rev_c_pack_name = list(rev_c_pack_names)[0]
 
1597
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
 
1598
                rev_c_pack_name)
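Editor's note: the loop over stacked_pack_names above leans on Python's for/else. The else clause runs only if the loop completes without hitting break, which is a compact way to say "fail the test if no pack matched". A generic illustration of the idiom with made-up pack contents:

    packs = {'pack-1': [('B',)], 'pack-2': [('A',), ('B',)]}
    for name, keys in sorted(packs.items()):
        if ('A',) in keys:
            inv_a_pack_name = name
            break
    else:
        raise AssertionError("no pack contains A's inventory")
    assert inv_a_pack_name == 'pack-2'
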
 
1599
 
 
1600
    def test_pack_with_distant_inventories(self):
 
1601
        # See https://bugs.launchpad.net/bzr/+bug/437003
 
1602
        # When repacking, it is possible to have an inventory in a different
 
1603
        # pack file than the associated revision. An autopack can then come
 
1604
        # along, and miss that inventory, and complain.
 
1605
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
 
1606
         ) = self.make_branch_with_disjoint_inventory_and_revision()
 
1607
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
 
1608
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
 
1609
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
 
1610
                    [a_pack, c_pack], '.test-pack')
 
1611
        # This would raise ValueError in bug #437003, but should not raise an
 
1612
        # error once fixed.
 
1613
        packer.pack()
 
1614
 
 
1615
    def test_pack_with_missing_inventory(self):
 
1616
        # Similar to test_pack_with_distant_inventories, but this time, we force
 
1617
        # the A inventory to actually be gone from the repository.
 
1618
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
 
1619
         ) = self.make_branch_with_disjoint_inventory_and_revision()
 
1620
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
 
1621
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
 
1622
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
 
1623
            repo._pack_collection.all_packs(), '.test-pack')
 
1624
        e = self.assertRaises(ValueError, packer.pack)
 
1625
        packer.new_pack.abort()
 
1626
        self.assertContainsRe(str(e),
 
1627
            r"We are missing inventories for revisions: .*'A'")
 
1628
 
 
1629
 
1625
1630
class TestCrossFormatPacks(TestCaseWithTransport):
1626
1631
 
1627
1632
    def log_pack(self, hint=None):
1642
1647
        self.addCleanup(target.unlock)
1643
1648
        source = source_tree.branch.repository._get_source(target._format)
1644
1649
        self.orig_pack = target.pack
1645
 
        target.pack = self.log_pack
 
1650
        self.overrideAttr(target, "pack", self.log_pack)
1646
1651
        search = target.search_missing_revision_ids(
1647
 
            source_tree.branch.repository, tip)
 
1652
            source_tree.branch.repository, revision_ids=[tip])
1648
1653
        stream = source.get_stream(search)
1649
1654
        from_format = source_tree.branch.repository._format
1650
1655
        sink = target._get_sink()
1666
1671
        self.addCleanup(target.unlock)
1667
1672
        source = source_tree.branch.repository
1668
1673
        self.orig_pack = target.pack
1669
 
        target.pack = self.log_pack
 
1674
        self.overrideAttr(target, "pack", self.log_pack)
1670
1675
        target.fetch(source)
1671
1676
        if expect_pack_called:
1672
1677
            self.assertLength(1, self.calls)
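Editor's note: both fetch helpers now route the pack() stub through self.overrideAttr rather than assigning target.pack directly, so the original method is restored when the test ends even if it raises. Roughly, the override-and-restore pattern amounts to the following sketch (simplified: unlike the real helper it assumes the attribute already exists):

    def override_attr(test, obj, attr_name, new_value):
        """Install new_value on obj and restore the old value at test cleanup."""
        original = getattr(obj, attr_name)          # assumes the attribute exists
        test.addCleanup(setattr, obj, attr_name, original)
        setattr(obj, attr_name, new_value)
        return original
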
1700
1705
    def test_IDS_format_same_no(self):
1701
1706
        # When the formats are the same, pack is not called.
1702
1707
        self.run_fetch('2a', '2a', False)
 
1708
 
 
1709
 
 
1710
class Test_LazyListJoin(tests.TestCase):
 
1711
 
 
1712
    def test__repr__(self):
 
1713
        lazy = repository._LazyListJoin(['a'], ['b'])
 
1714
        self.assertEqual("breezy.repository._LazyListJoin((['a'], ['b']))",
 
1715
                         repr(lazy))
 
1716
 
 
1717
 
 
1718
class TestFeatures(tests.TestCaseWithTransport):
 
1719
 
 
1720
    def test_open_with_present_feature(self):
 
1721
        self.addCleanup(
 
1722
            bzrrepository.RepositoryFormatMetaDir.unregister_feature,
 
1723
            b"makes-cheese-sandwich")
 
1724
        bzrrepository.RepositoryFormatMetaDir.register_feature(
 
1725
            b"makes-cheese-sandwich")
 
1726
        repo = self.make_repository('.')
 
1727
        repo.lock_write()
 
1728
        repo._format.features[b"makes-cheese-sandwich"] = b"required"
 
1729
        repo._format.check_support_status(False)
 
1730
        repo.unlock()
 
1731
 
 
1732
    def test_open_with_missing_required_feature(self):
 
1733
        repo = self.make_repository('.')
 
1734
        repo.lock_write()
 
1735
        repo._format.features[b"makes-cheese-sandwich"] = b"required"
 
1736
        self.assertRaises(bzrdir.MissingFeature,
 
1737
            repo._format.check_support_status, False)
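Editor's note: taken together, these two tests pin down the format feature mechanism: check_support_status only accepts a feature marked b"required" if that feature has been registered on RepositoryFormatMetaDir beforehand; otherwise MissingFeature is raised. A minimal sketch of how a plugin that implements such a feature would presumably advertise it, reusing only the calls shown above (the feature name is the test's own placeholder):

    from breezy.bzr import repository as bzrrepository

    FEATURE = b"makes-cheese-sandwich"   # placeholder name taken from the test

    # Registering the feature once at plugin load time means formats that list
    # it under features as b"required" pass check_support_status(False) instead
    # of raising MissingFeature.
    bzrrepository.RepositoryFormatMetaDir.register_feature(FEATURE)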