/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2017-06-08 23:30:31 UTC
  • mto: This revision was merged to the branch mainline in revision 6690.
  • Revision ID: jelmer@jelmer.uk-20170608233031-3qavls2o7a1pqllj
Update imports.
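
The metadata above can also be read programmatically once the branch has been fetched. A minimal sketch using breezy's Python API (the local path 'remove-bazaar' is an assumption; the revision id is the one shown above, passed as bytes as recent breezy expects):

from breezy.branch import Branch

branch = Branch.open('remove-bazaar')  # assumed local fetch location
with branch.lock_read():
    rev = branch.repository.get_revision(
        b'jelmer@jelmer.uk-20170608233031-3qavls2o7a1pqllj')
    print(rev.committer, rev.message)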

--- breezy/tests/test_repository.py
+++ breezy/tests/test_repository.py
@@ -27,6 +27,7 @@
 import breezy
 from breezy.errors import (
     UnknownFormatError,
+    UnsupportedFormatError,
     )
 from breezy import (
     tests,
@@ -57,7 +58,7 @@
     upgrade,
     workingtree,
     )
-from breezy.bzr import (
+from breezy.repofmt import (
     groupcompress_repo,
     knitrepo,
     knitpack_repo,
@@ -69,11 +70,9 @@
 
     def test_get_set_default_format(self):
         old_default = controldir.format_registry.get('default')
-        old_default_help = controldir.format_registry.get_help('default')
         private_default = old_default().repository_format.__class__
         old_format = repository.format_registry.get_default()
         self.assertTrue(isinstance(old_format, private_default))
-
         def make_sample_bzrdir():
             my_bzrdir = bzrdir.BzrDirMetaFormat1()
             my_bzrdir.repository_format = SampleRepositoryFormat()
@@ -91,8 +90,7 @@
         finally:
             controldir.format_registry.remove('default')
             controldir.format_registry.remove('sample')
-            controldir.format_registry.register(
-                'default', old_default, old_default_help)
+            controldir.format_registry.register('default', old_default, '')
         self.assertIsInstance(repository.format_registry.get_default(),
                               old_format.__class__)
 
@@ -107,18 +105,18 @@
     @classmethod
     def get_format_string(cls):
         """See RepositoryFormat.get_format_string()."""
-        return b"Sample .bzr repository format."
+        return "Sample .bzr repository format."
 
-    def initialize(self, a_controldir, shared=False):
+    def initialize(self, a_bzrdir, shared=False):
         """Initialize a repository in a BzrDir"""
-        t = a_controldir.get_repository_transport(self)
+        t = a_bzrdir.get_repository_transport(self)
         t.put_bytes('format', self.get_format_string())
         return 'A bzr repository dir'
 
     def is_supported(self):
         return False
 
-    def open(self, a_controldir, _found=False):
+    def open(self, a_bzrdir, _found=False):
         return "opened repository."
 
 
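The b-prefix churn through this hunk is the bytes-versus-text boundary: the format marker is raw bytes on disk (written with t.put_bytes above), so a format string that is compared against what find_format reads back must be bytes on Python 3. A minimal illustration of why the two spellings are not interchangeable:

# On Python 3, bytes and str never compare equal, so a marker read back
# from disk (bytes) only matches a bytes literal:
on_disk = b"Sample .bzr repository format."
assert on_disk == b"Sample .bzr repository format."
assert on_disk != "Sample .bzr repository format."  # bytes != str, always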
 
@@ -139,12 +137,11 @@
         # create a branch with a few known format objects.
         # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
-
         def check_format(format, url):
-            dir = format._matchingcontroldir.initialize(url)
+            dir = format._matchingbzrdir.initialize(url)
             format.initialize(dir)
-            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
-                dir)
+            t = transport.get_transport_from_path(url)
+            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
             self.assertIsInstance(found_format, format.__class__)
         check_format(repository.format_registry.get_default(), "bar")
 
@@ -157,11 +154,11 @@
     def test_from_string(self):
         self.assertIsInstance(
             SampleRepositoryFormat.from_string(
-                b"Sample .bzr repository format."),
+                "Sample .bzr repository format."),
             SampleRepositoryFormat)
         self.assertRaises(AssertionError,
-                          SampleRepositoryFormat.from_string,
-                          b"Different .bzr repository format.")
+            SampleRepositoryFormat.from_string,
+                "Different .bzr repository format.")
 
     def test_find_format_unknown_format(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -172,17 +169,15 @@
 
     def test_find_format_with_features(self):
         tree = self.make_branch_and_tree('.', format='2a')
-        tree.branch.repository.update_feature_flags({b"name": b"necessity"})
-        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
-            tree.controldir)
-        self.assertIsInstance(
-            found_format, bzrrepository.RepositoryFormatMetaDir)
-        self.assertEqual(found_format.features.get(b"name"), b"necessity")
-        self.assertRaises(
-            bzrdir.MissingFeature, found_format.check_support_status, True)
-        self.addCleanup(
-            bzrrepository.RepositoryFormatMetaDir.unregister_feature, b"name")
-        bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
+        tree.branch.repository.update_feature_flags({"name": "necessity"})
+        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
+        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
+        self.assertEqual(found_format.features.get("name"), "necessity")
+        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
+            True)
+        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
+            "name")
+        bzrrepository.RepositoryFormatMetaDir.register_feature("name")
         found_format.check_support_status(True)
 
 
@@ -195,11 +190,9 @@
     def test_register_unregister_format(self):
         format = SampleRepositoryFormat()
         self.registry.register(format)
-        self.assertEqual(format, self.registry.get(
-            b"Sample .bzr repository format."))
+        self.assertEqual(format, self.registry.get("Sample .bzr repository format."))
         self.registry.remove(format)
-        self.assertRaises(KeyError, self.registry.get,
-                          b"Sample .bzr repository format.")
+        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
     def test_get_all(self):
         format = SampleRepositoryFormat()
@@ -215,8 +208,8 @@
 
     def test_register_extra_lazy(self):
         self.assertEqual([], self.registry._get_all())
-        self.registry.register_extra_lazy(__name__,
-                                          "SampleExtraRepositoryFormat")
+        self.registry.register_extra_lazy("breezy.tests.test_repository",
+            "SampleExtraRepositoryFormat")
         formats = self.registry._get_all()
         self.assertEqual(1, len(formats))
         self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
@@ -226,14 +219,14 @@
 
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
         self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
         self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
@@ -249,25 +242,24 @@
         # empty revision-store directory
         # empty weaves directory
         t = control.get_repository_transport(None)
-        with t.get('format') as f:
-            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
-                                 f.read())
+        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
+                             t.get('format').read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
         # Check per-file knits.
-        control.create_branch()
+        branch = control.create_branch()
         tree = control.create_workingtree()
-        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
-        tree.put_file_bytes_non_atomic('foo', b'')
-        tree.commit('1st post', rev_id=b'foo')
+        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
+        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
+        tree.commit('1st post', rev_id='foo')
         self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
-                           b'\nfoo fulltext 0 81  :')
+            '\nfoo fulltext 0 81  :')
 
-    def assertHasKnit(self, t, knit_name, extra_content=b''):
+    def assertHasKnit(self, t, knit_name, extra_content=''):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
+        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                              t.get(knit_name + '.kndx').read())
 
     def check_knits(self, t):
@@ -278,7 +270,7 @@
 
     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
         # lock: is a directory
@@ -287,19 +279,17 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        with t.get('format') as f:
-            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
-                                 f.read())
+        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
+                             t.get('format').read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff(b'', t.get('shared-storage').read())
+        self.assertEqualDiff('', t.get('shared-storage').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
 
     def test_shared_no_tree_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(
-            control, shared=True)
+        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
         repo.set_make_working_trees(False)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
@@ -309,13 +299,12 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        with t.get('format') as f:
-            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
-                                 f.read())
+        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
+                             t.get('format').read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff(b'', t.get('shared-storage').read())
-        self.assertEqualDiff(b'', t.get('no-working-trees').read())
+        self.assertEqualDiff('', t.get('shared-storage').read())
+        self.assertEqualDiff('', t.get('no-working-trees').read())
         repo.set_make_working_trees(True)
         self.assertFalse(t.has('no-working-trees'))
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
@@ -328,29 +317,29 @@
         the whole inventory. So we grab the one from the expected text. Which
         is valid when the api is not being abused.
         """
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
-        inv_xml = b'<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
-        self.assertEqual(b'test-rev-id', inv.root.revision)
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
+        inv_xml = '<inventory format="5">\n</inventory>\n'
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
         """If it is set, then we re-use the global revision id"""
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
-        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
-                   b'</inventory>\n')
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
+        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
+                   '</inventory>\n')
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
         self.assertRaises(AssertionError, repo._deserialise_inventory,
-                          b'test-rev-id', [inv_xml])
-        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
-        self.assertEqual(b'other-rev-id', inv.root.revision)
+            'test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
         self.assertFalse(repo._format.supports_external_lookups)
 
 
@@ -384,7 +373,7 @@
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
         return (isinstance(repo_source, DummyRepository) and
-                isinstance(repo_target, DummyRepository))
+            isinstance(repo_target, DummyRepository))
 
 
 class TestInterRepository(TestCaseWithTransport):
@@ -429,17 +418,13 @@
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = (
-            repo._format.supports_tree_reference)
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
         dummy_a._format.rich_root_data = repo._format.rich_root_data
-        dummy_a._format.supports_full_versioned_files = (
-            repo._format.supports_full_versioned_files)
+        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
         dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = (
-            repo._format.supports_tree_reference)
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
         dummy_b._format.rich_root_data = repo._format.rich_root_data
-        dummy_b._format.supports_full_versioned_files = (
-            repo._format.supports_full_versioned_files)
+        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -462,14 +447,14 @@
 
     @classmethod
     def get_format_string(cls):
-        return b"Test Format 1"
+        return "Test Format 1"
 
 
 class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
     @classmethod
     def get_format_string(cls):
-        return b"Test Format 2"
+        return "Test Format 2"
 
 
 class TestRepositoryConverter(TestCaseWithTransport):
@@ -479,17 +464,20 @@
         target_format = TestRepositoryFormat2()
         repository.format_registry.register(source_format)
         self.addCleanup(repository.format_registry.remove,
-                        source_format)
+            source_format)
         repository.format_registry.register(target_format)
         self.addCleanup(repository.format_registry.remove,
-                        target_format)
+            target_format)
         t = self.get_transport()
         t.mkdir('repository')
         repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
         repo = TestRepositoryFormat1().initialize(repo_dir)
         converter = repository.CopyConverter(target_format)
-        with breezy.ui.ui_factory.nested_progress_bar() as pb:
+        pb = breezy.ui.ui_factory.nested_progress_bar()
+        try:
             converter.convert(repo, pb)
+        finally:
+            pb.finished()
         repo = repo_dir.open_repository()
         self.assertTrue(isinstance(target_format, repo._format.__class__))
 
@@ -515,23 +503,30 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit1()
         tree = self.make_branch_and_tree('.', format)
-        tree.commit("Dull commit", rev_id=b"dull")
-        revision_tree = tree.branch.repository.revision_tree(b'dull')
-        with revision_tree.lock_read():
-            self.assertRaises(
-                errors.NoSuchFile, revision_tree.get_file_lines, u'')
+        tree.commit("Dull commit", rev_id="dull")
+        revision_tree = tree.branch.repository.revision_tree('dull')
+        revision_tree.lock_read()
+        try:
+            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
+                revision_tree.get_root_id())
+        finally:
+            revision_tree.unlock()
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         upgrade.Convert('.', format)
         tree = workingtree.WorkingTree.open('.')
-        revision_tree = tree.branch.repository.revision_tree(b'dull')
-        with revision_tree.lock_read():
-            revision_tree.get_file_lines(u'')
-        tree.commit("Another dull commit", rev_id=b'dull2')
-        revision_tree = tree.branch.repository.revision_tree(b'dull2')
+        revision_tree = tree.branch.repository.revision_tree('dull')
+        revision_tree.lock_read()
+        try:
+            revision_tree.get_file_lines(revision_tree.get_root_id())
+        finally:
+            revision_tree.unlock()
+        tree.commit("Another dull commit", rev_id='dull2')
+        revision_tree = tree.branch.repository.revision_tree('dull2')
         revision_tree.lock_read()
         self.addCleanup(revision_tree.unlock)
-        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))
+        self.assertEqual('dull',
+                revision_tree.get_file_revision(revision_tree.get_root_id()))
 
     def test_supports_external_lookups(self):
         format = bzrdir.BzrDirMetaFormat1()
@@ -546,12 +541,12 @@
         mt = self.make_branch_and_memory_tree('test', format='2a')
         mt.lock_write()
         self.addCleanup(mt.unlock)
-        mt.add([''], [b'root-id'])
+        mt.add([''], ['root-id'])
         mt.commit('first')
         index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
         self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
         # It should also work if we re-open the repo
-        repo = mt.branch.repository.controldir.open_repository()
+        repo = mt.branch.repository.bzrdir.open_repository()
         repo.lock_read()
         self.addCleanup(repo.unlock)
         index = repo.chk_bytes._index._graph_index._indices[0]
@@ -560,71 +555,65 @@
     def test_fetch_combines_groups(self):
         builder = self.make_branch_builder('source', format='2a')
         builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', b'content\n'))],
-            revision_id=b'1')
-        builder.build_snapshot([b'1'], [
-            ('modify', ('file', b'content-2\n'))],
-            revision_id=b'2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [(b'file-id', b'1',), (b'file-id', b'2',)])
-        file_1_details = details[(b'file-id', b'1')]
-        file_2_details = details[(b'file-id', b'2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', b'content\n'))],
-            revision_id=b'1')
-        builder.build_snapshot([b'1'], [
-            ('modify', ('file', b'content-2\n'))],
-            revision_id=b'2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [(b'file-id', b'1',), (b'file-id', b'2',)])
-        file_1_details = details[(b'file-id', b'1')]
-        file_2_details = details[(b'file-id', b'2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', b'content\n'))],
-            revision_id=b'1')
-        builder.build_snapshot([b'1'], [
-            ('modify', ('file', b'content-2\n'))],
-            revision_id=b'2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [(b'file-id', b'1',), (b'file-id', b'2',)])
-        file_1_details = details[(b'file-id', b'1')]
-        file_2_details = details[(b'file-id', b'2')]
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
         # The index, and what to read off disk, should be the same for both
         # versions of the file.
         self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
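
The two sides of this hunk call BranchBuilder.build_snapshot with different signatures: one passes the parent list first and the revision id as a keyword, the other passes the revision id first. The 'modify' action likewise takes a path on one side and a file id on the other. A side-by-side sketch of the same snapshot in both conventions (builder as in the tests above):

# Same second snapshot, spelled in both calling conventions from this hunk:
builder.build_snapshot([b'1'], [              # (parents, actions, revision_id=...)
    ('modify', ('file', b'content-2\n'))],    # 'modify' addresses the path
    revision_id=b'2')
builder.build_snapshot('2', ['1'], [          # (revision_id, parents, actions)
    ('modify', ('file-id', 'content-2\n'))])  # 'modify' addresses the file id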
@@ -636,7 +625,7 @@
     def test_inventories_use_chk_map_with_parent_base_dict(self):
         tree = self.make_branch_and_memory_tree('repo', format="2a")
         tree.lock_write()
-        tree.add([''], [b'TREE_ROOT'])
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
         tree.unlock()
         tree.lock_read()
@@ -646,8 +635,8 @@
         inv.parent_id_basename_to_file_id._ensure_root()
         inv.id_to_entry._ensure_root()
         self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
-        self.assertEqual(
-            65536, inv.parent_id_basename_to_file_id._root_node.maximum_size)
+        self.assertEqual(65536,
+            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
     def test_autopack_unchanged_chk_nodes(self):
         # at 20 unchanged commits, chk pages are packed that are split into
@@ -657,7 +646,7 @@
         tree = self.make_branch_and_memory_tree('tree', format='2a')
         tree.lock_write()
         self.addCleanup(tree.unlock)
-        tree.add([''], [b'TREE_ROOT'])
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
@@ -665,7 +654,7 @@
         tree = self.make_branch_and_memory_tree('tree', format='2a')
         tree.lock_write()
         self.addCleanup(tree.unlock)
-        tree.add([''], [b'TREE_ROOT'])
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -698,25 +687,25 @@
 
     def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
         source_builder = self.make_branch_builder('source',
-                                                  format='2a')
+                            format='2a')
         # We have to build a fairly large tree, so that we are sure the chk
         # pages will have split into multiple pages.
-        entries = [('add', ('', b'a-root-id', 'directory', None))]
+        entries = [('add', ('', 'a-root-id', 'directory', None))]
         for i in 'abcdefghijklmnopqrstuvwxyz123456789':
             for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                 fname = i + j
-                fid = fname.encode('utf-8') + b'-id'
-                content = b'content for %s\n' % (fname.encode('utf-8'),)
+                fid = fname + '-id'
+                content = 'content for %s\n' % (fname,)
                 entries.append(('add', (fname, fid, 'file', content)))
         source_builder.start_series()
-        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
+        source_builder.build_snapshot('rev-1', None, entries)
         # Now change a few of them, so we get a few new pages for the second
         # revision
-        source_builder.build_snapshot([b'rev-1'], [
-            ('modify', ('aa', b'new content for aa-id\n')),
-            ('modify', ('cc', b'new content for cc-id\n')),
-            ('modify', ('zz', b'new content for zz-id\n')),
-            ], revision_id=b'rev-2')
+        source_builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('aa-id', 'new content for aa-id\n')),
+            ('modify', ('cc-id', 'new content for cc-id\n')),
+            ('modify', ('zz-id', 'new content for zz-id\n')),
+            ])
         source_builder.finish_series()
         source_branch = source_builder.get_branch()
         source_branch.lock_read()
@@ -727,33 +716,33 @@
 
         # On a regular pass, getting the inventories and chk pages for rev-2
         # would only get the newly created chk pages
-        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
-                                        {b'rev-2'})
-        simple_chk_records = set()
+        search = vf_search.SearchResult({'rev-2'}, {'rev-1'}, 1,
+                                    {'rev-2'})
+        simple_chk_records = []
         for vf_name, substream in source.get_stream(search):
             if vf_name == 'chk_bytes':
                 for record in substream:
-                    simple_chk_records.add(record.key)
+                    simple_chk_records.append(record.key)
             else:
                 for _ in substream:
                     continue
         # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
-                         set(simple_chk_records))
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
         # Now, when we do a similar call using 'get_stream_for_missing_keys'
         # we should get a much larger set of pages.
-        missing = [('inventories', b'rev-2')]
-        full_chk_records = set()
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
         for vf_name, substream in source.get_stream_for_missing_keys(missing):
             if vf_name == 'inventories':
                 for record in substream:
-                    self.assertEqual((b'rev-2',), record.key)
+                    self.assertEqual(('rev-2',), record.key)
             elif vf_name == 'chk_bytes':
                 for record in substream:
-                    full_chk_records.add(record.key)
+                    full_chk_records.append(record.key)
             else:
                 self.fail('Should not be getting a stream of %s' % (vf_name,))
         # We have 257 records now. This is because we have 1 root page, and 256
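
Note the container change in this hunk: one side accumulates the streamed chk record keys in a set and compares sets, the other appends to a list and asserts an exact ordering. The set form tolerates records arriving in any order; a small self-contained illustration:

# Keys collected into a set compare equal regardless of arrival order:
arrived = [(b'sha1:bbbb',), (b'sha1:aaaa',)]  # hypothetical stream order
collected = set()
for key in arrived:
    collected.add(key)
assert collected == {(b'sha1:aaaa',), (b'sha1:bbbb',)}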
@@ -776,29 +765,25 @@
         source = self.make_repository('source', format='pack-0.92')
         target = self.make_repository('target', format='pack-0.92')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_rich_root_pack(self):
         source = self.make_repository('source', format='rich-root-pack')
         target = self.make_repository('target', format='rich-root-pack')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19(self):
         source = self.make_repository('source', format='1.9')
         target = self.make_repository('target', format='1.9')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19_rich_root(self):
         source = self.make_repository('source', format='1.9-rich-root')
         target = self.make_repository('target', format='1.9-rich-root')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_remote_exact_pack_19(self):
         trans = self.make_smart_server('target')
@@ -807,8 +792,7 @@
         target = self.make_repository('target', format='1.9')
         target = repository.Repository.open(trans.base)
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_stream_source_to_non_exact(self):
         source = self.make_repository('source', format='pack-0.92')
@@ -845,60 +829,55 @@
         super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
         self.builder = self.make_branch_builder('source')
         self.builder.start_series()
-        self.builder.build_snapshot(
-            None,
-            [('add', ('', b'tree-root', 'directory', None))],
-            revision_id=b'initial')
+        self.builder.build_snapshot('initial', None,
+            [('add', ('', 'tree-root', 'directory', None))])
         self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)
 
     def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(
-            sorted(expected_result),
+        self.assertEqual(sorted(expected_result),
             sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
     def test_simple(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
-        rev_set = [b'revid2']
-        self.assertParentIds([b'revid1'], rev_set)
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        rev_set = ['revid2']
+        self.assertParentIds(['revid1'], rev_set)
 
     def test_not_first_parent(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
-        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
-        rev_set = [b'revid3', b'revid2']
-        self.assertParentIds([b'revid1'], rev_set)
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2'], [])
+        rev_set = ['revid3', 'revid2']
+        self.assertParentIds(['revid1'], rev_set)
 
     def test_not_null(self):
-        rev_set = [b'initial']
+        rev_set = ['initial']
         self.assertParentIds([], rev_set)
 
     def test_not_null_set(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot('revid1', None, [])
         rev_set = [_mod_revision.NULL_REVISION]
         self.assertParentIds([], rev_set)
 
     def test_ghost(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        rev_set = [b'ghost', b'revid1']
-        self.assertParentIds([b'initial'], rev_set)
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = ['ghost', 'revid1']
+        self.assertParentIds(['initial'], rev_set)
 
     def test_ghost_parent(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot(
-            [b'revid1', b'ghost'], [], revision_id=b'revid2')
-        rev_set = [b'revid2', b'revid1']
-        self.assertParentIds([b'ghost', b'initial'], rev_set)
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
+        rev_set = ['revid2', 'revid1']
+        self.assertParentIds(['ghost', 'initial'], rev_set)
 
     def test_righthand_parent(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
-        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
-                                    revision_id=b'revid3')
-        rev_set = [b'revid3', b'revid2a']
-        self.assertParentIds([b'revid1', b'revid2b'], rev_set)
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2a', ['revid1'], [])
+        self.builder.build_snapshot('revid2b', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
+        rev_set = ['revid3', 'revid2a']
+        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
 
 class TestWithBrokenRepo(TestCaseWithTransport):
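
Taken together, the tests above pin down what _find_parent_ids_of_revisions returns: every parent of a revision in the set that is not itself in the set, keeping ghost parents and dropping the null revision. A behavioural sketch under those assumptions (not the brz implementation):

def find_parent_ids(parent_map, rev_set, null='null:'):
    # Parents referenced from rev_set that lie outside rev_set.
    parents = set()
    for rev in rev_set:
        # A ghost listed in rev_set has no entry in the parent map.
        parents.update(parent_map.get(rev, ()))
    parents.discard(null)  # NULL_REVISION never counts as a parent
    return parents - set(rev_set)

# Matches test_ghost_parent above:
assert find_parent_ids(
    {'revid1': ('initial',), 'revid2': ('revid1', 'ghost')},
    ['revid2', 'revid1']) == {'ghost', 'initial'}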
@@ -916,43 +895,42 @@
             repo.start_write_group()
             cleanups.append(repo.commit_write_group)
             # make rev1a: A well-formed revision, containing 'file1'
-            inv = inventory.Inventory(revision_id=b'rev1a')
-            inv.root.revision = b'rev1a'
-            self.add_file(repo, inv, 'file1', b'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
-            repo.add_inventory(b'rev1a', inv, [])
-            revision = _mod_revision.Revision(
-                b'rev1a',
+            inv = inventory.Inventory(revision_id='rev1a')
+            inv.root.revision = 'rev1a'
+            self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
+            repo.add_inventory('rev1a', inv, [])
+            revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
                 inventory_sha1='', timezone=0, message='foo', parent_ids=[])
-            repo.add_revision(b'rev1a', revision, inv)
+            repo.add_revision('rev1a', revision, inv)
 
             # make rev1b, which has no Revision, but has an Inventory, and
             # file1
-            inv = inventory.Inventory(revision_id=b'rev1b')
-            inv.root.revision = b'rev1b'
-            self.add_file(repo, inv, 'file1', b'rev1b', [])
-            repo.add_inventory(b'rev1b', inv, [])
+            inv = inventory.Inventory(revision_id='rev1b')
+            inv.root.revision = 'rev1b'
+            self.add_file(repo, inv, 'file1', 'rev1b', [])
+            repo.add_inventory('rev1b', inv, [])
 
             # make rev2, with file1 and file2
             # file2 is sane
             # file1 has 'rev1b' as an ancestor, even though this is not
             # mentioned by 'rev1a', making it an unreferenced ancestor
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
-            self.add_file(repo, inv, 'file2', b'rev2', [])
-            self.add_revision(repo, b'rev2', inv, [b'rev1a'])
+            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
+            self.add_file(repo, inv, 'file2', 'rev2', [])
+            self.add_revision(repo, 'rev2', inv, ['rev1a'])
 
             # make ghost revision rev1c
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', b'rev1c', [])
+            self.add_file(repo, inv, 'file2', 'rev1c', [])
 
             # make rev3 with file2
             # file2 refers to 'rev1c', which is a ghost in this repository, so
             # file2 cannot have rev1c as its ancestor.
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
-            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
+            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
+            self.add_revision(repo, 'rev3', inv, ['rev1c'])
             return repo
         finally:
             for cleanup in reversed(cleanups):
@@ -963,23 +941,20 @@
         inv.root.revision = revision_id
         repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
-        revision = _mod_revision.Revision(
-            revision_id,
+        revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
             timezone=0, message='foo', parent_ids=parent_ids)
         repo.add_revision(revision_id, revision, inv)
 
     def add_file(self, repo, inv, filename, revision, parents):
-        file_id = filename.encode('utf-8') + b'-id'
-        content = [b'line\n']
-        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
+        file_id = filename + '-id'
+        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
         entry.revision = revision
-        entry.text_sha1 = osutils.sha_strings(content)
         entry.text_size = 0
         inv.add(entry)
         text_key = (file_id, revision)
         parent_keys = [(file_id, parent) for parent in parents]
-        repo.texts.add_lines(text_key, parent_keys, content)
+        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
 
     def test_insert_from_broken_repo(self):
         """Inserting a data stream from a broken repository won't silently
@@ -996,14 +971,14 @@
         empty_repo.lock_read()
         self.addCleanup(empty_repo.unlock)
         text = next(empty_repo.texts.get_record_stream(
-            [(b'file2-id', b'rev3')], 'topological', True))
-        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))
+            [('file2-id', 'rev3')], 'topological', True))
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
 
     def get_format(self):
-        return controldir.format_registry.make_controldir('pack-0.92')
+        return controldir.format_registry.make_bzrdir('pack-0.92')
 
     def get_packs(self):
         format = self.get_format()
@@ -1031,11 +1006,11 @@
     def test__clear_obsolete_packs(self):
        packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
         res = packs._clear_obsolete_packs()
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual([], obsolete_pack_trans.list_dir('.'))
@@ -1043,11 +1018,11 @@
     def test__clear_obsolete_packs_preserve(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
         res = packs._clear_obsolete_packs(preserve={'a-pack'})
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
@@ -1081,7 +1056,7 @@
     def test_repr(self):
         packs = self.get_packs()
         self.assertContainsRe(repr(packs),
-                              'RepositoryPackCollection(.*Repository(.*))')
+            'RepositoryPackCollection(.*Repository(.*))')
 
     def test__obsolete_packs(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1102,8 +1077,8 @@
                          sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-                         sorted({osutils.splitext(n)[0] for n in
-                                 packs._index_transport.list_dir('.')}))
+            sorted({osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')}))
 
     def test__obsolete_packs_missing_directory(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1119,8 +1094,8 @@
                          sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-                         sorted({osutils.splitext(n)[0] for n in
-                                 packs._index_transport.list_dir('.')}))
+            sorted({osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')}))
 
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
@@ -1134,23 +1109,23 @@
     def test_pack_distribution_one_to_nine(self):
         packs = self.get_packs()
         self.assertEqual([1],
-                         packs.pack_distribution(1))
+            packs.pack_distribution(1))
         self.assertEqual([1, 1],
-                         packs.pack_distribution(2))
+            packs.pack_distribution(2))
         self.assertEqual([1, 1, 1],
-                         packs.pack_distribution(3))
+            packs.pack_distribution(3))
         self.assertEqual([1, 1, 1, 1],
-                         packs.pack_distribution(4))
+            packs.pack_distribution(4))
         self.assertEqual([1, 1, 1, 1, 1],
-                         packs.pack_distribution(5))
+            packs.pack_distribution(5))
         self.assertEqual([1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(6))
+            packs.pack_distribution(6))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(7))
+            packs.pack_distribution(7))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(8))
+            packs.pack_distribution(8))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(9))
+            packs.pack_distribution(9))
 
     def test_pack_distribution_stable_at_boundaries(self):
         """When there are multi-rev packs the counts are stable."""
1187
1162
    def test_plan_pack_operations_2010_combines_smallest_two(self):
1188
1163
        packs = self.get_packs()
1189
1164
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
1190
 
                          (1, "single1")]
 
1165
            (1, "single1")]
1191
1166
        # rev count - 2010 -> 2x1000 + 1x10 (3)
1192
1167
        pack_operations = packs.plan_autopack_combinations(
1193
1168
            existing_packs, [1000, 1000, 10])
1260
1235
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
1261
1236
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
1262
1237
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
1263
 
        self.assertEqual(
1264
 
            pack_repo.ExistingPack(
1265
 
                packs._pack_transport, name, rev_index, inv_index, txt_index,
1266
 
                sig_index), pack_1)
 
1238
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1239
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
1267
1240
        # and the same instance should be returned on successive calls.
1268
1241
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
1269
1242
 
1281
1254
        self.assertTrue(packs.reload_pack_names())
1282
1255
        self.assertEqual(new_names, packs.names())
1283
1256
        # And the repository can access the new revision
1284
 
        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
 
1257
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
1285
1258
        self.assertFalse(packs.reload_pack_names())
1286
1259
 
1287
1260
    def test_reload_pack_names_added_and_removed(self):
1294
1267
        self.assertEqual(names, packs.names())
1295
1268
        self.assertTrue(packs.reload_pack_names())
1296
1269
        self.assertEqual(new_names, packs.names())
1297
 
        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1270
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1298
1271
        self.assertFalse(packs.reload_pack_names())
1299
1272
 
1300
1273
    def test_reload_pack_names_preserves_pending(self):
1308
1281
        r.start_write_group()
1309
1282
        self.addCleanup(r.abort_write_group)
1310
1283
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1311
 
            (b'text', b'rev'), (), None, b'content\n')])
 
1284
            ('text', 'rev'), (), None, 'content\n')])
1312
1285
        new_pack = packs._new_pack
1313
1286
        self.assertTrue(new_pack.data_inserted())
1314
1287
        new_pack.finish()
1318
1291
        packs._remove_pack_from_memory(removed_pack)
1319
1292
        names = packs.names()
1320
1293
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1321
 
        new_names = {x[0] for x in new_nodes}
1322
 
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
 
1294
        new_names = {x[0][0] for x in new_nodes}
 
1295
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1323
1296
        self.assertEqual(set(names) - set(orig_names), new_names)
1324
1297
        self.assertEqual({new_pack.name}, new_names)
1325
1298
        self.assertEqual([to_remove_name],
1326
 
                         sorted([x[0] for x in deleted_nodes]))
 
1299
                         sorted([x[0][0] for x in deleted_nodes]))
1327
1300
        packs.reload_pack_names()
1328
1301
        reloaded_names = packs.names()
1329
1302
        self.assertEqual(orig_at_load, packs._packs_at_load)
1330
1303
        self.assertEqual(names, reloaded_names)
1331
1304
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1332
 
        new_names = {x[0] for x in new_nodes}
1333
 
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
 
1305
        new_names = {x[0][0] for x in new_nodes}
 
1306
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1334
1307
        self.assertEqual(set(names) - set(orig_names), new_names)
1335
1308
        self.assertEqual({new_pack.name}, new_names)
1336
1309
        self.assertEqual([to_remove_name],
1337
 
                         sorted([x[0] for x in deleted_nodes]))
 
1310
                         sorted([x[0][0] for x in deleted_nodes]))
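The x[0] / x[0][0] difference between the two sides of these hunks suggests the shape of the nodes yielded by _diff_pack_names changed: the old code unpacked a 1-tuple index key, while the new code receives the pack name directly. An illustrative guess at the two shapes:

    old_nodes = [(('pack-1',), b'value')]   # key is a 1-tuple: name = x[0][0]
    new_nodes = [('pack-1', b'value')]      # key is the name:  name = x[0]
    assert {x[0][0] for x in old_nodes} == {x[0] for x in new_nodes}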
1338
1311
 
1339
1312
    def test_autopack_obsoletes_new_pack(self):
1340
1313
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1342
1315
        packs.pack_distribution = lambda x: [10]
1343
1316
        r.start_write_group()
1344
1317
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1345
 
            (b'bogus-rev',), (), None, b'bogus-content\n')])
 
1318
            ('bogus-rev',), (), None, 'bogus-content\n')])
1346
1319
        # This should trigger an autopack, which will combine everything into a
1347
1320
        # single pack file.
1348
 
        r.commit_write_group()
 
1321
        new_names = r.commit_write_group()
1349
1322
        names = packs.names()
1350
1323
        self.assertEqual(1, len(names))
1351
1324
        self.assertEqual([names[0] + '.pack'],
1357
1330
        # full-pack via the other repo, which will cause us to re-evaluate and
1358
1331
        # decide we don't need to do anything
1359
1332
        orig_execute = packs._execute_pack_operations
1360
 
 
1361
1333
        def _munged_execute_pack_ops(*args, **kwargs):
1362
1334
            tree.branch.repository.pack()
1363
1335
            return orig_execute(*args, **kwargs)
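_munged_execute_pack_ops is an instance of a general wrap-and-delegate pattern: run a side effect (here, a full pack() through the other repository handle) and then fall through to the original method. A generic, self-contained version of the pattern:

    def wrap_method(obj, attr, before):
        # Replace obj.attr with a wrapper that runs `before()` first, then
        # delegates to the original; return the original for restoring.
        original = getattr(obj, attr)

        def wrapper(*args, **kwargs):
            before()
            return original(*args, **kwargs)

        setattr(obj, attr, wrapper)
        return original

    class Demo:
        def run(self):
            return 'ran'

    demo = Demo()
    calls = []
    wrap_method(demo, 'run', before=lambda: calls.append('side effect'))
    assert demo.run() == 'ran' and calls == ['side effect']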
1476
1448
            index_class=BTreeGraphIndex,
1477
1449
            use_chk_index=False)
1478
1450
        pack = pack_repo.NewPack(collection)
1479
 
        self.addCleanup(pack.abort)  # Make sure the write stream gets closed
 
1451
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
1480
1452
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
1481
1453
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1482
1454
        self.assertIsInstance(pack._hash, type(osutils.md5()))
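The type(osutils.md5()) spelling is needed because hashlib exposes factory functions rather than importable classes, so the concrete hash type can only be obtained from a fresh instance. The same idiom with the standard library directly:

    import hashlib

    md5_type = type(hashlib.md5())     # no public class name to import
    assert isinstance(hashlib.md5(), md5_type)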
1495
1467
    def test_pack_optimizes_pack_order(self):
1496
1468
        builder = self.make_branch_builder('.', format="1.9")
1497
1469
        builder.start_series()
1498
 
        builder.build_snapshot(None, [
1499
 
            ('add', ('', b'root-id', 'directory', None)),
1500
 
            ('add', ('f', b'f-id', 'file', b'content\n'))],
1501
 
            revision_id=b'A')
1502
 
        builder.build_snapshot([b'A'],
1503
 
                               [('modify', ('f', b'new-content\n'))],
1504
 
                               revision_id=b'B')
1505
 
        builder.build_snapshot([b'B'],
1506
 
                               [('modify', ('f', b'third-content\n'))],
1507
 
                               revision_id=b'C')
1508
 
        builder.build_snapshot([b'C'],
1509
 
                               [('modify', ('f', b'fourth-content\n'))],
1510
 
                               revision_id=b'D')
 
1470
        builder.build_snapshot('A', None, [
 
1471
            ('add', ('', 'root-id', 'directory', None)),
 
1472
            ('add', ('f', 'f-id', 'file', 'content\n'))])
 
1473
        builder.build_snapshot('B', ['A'],
 
1474
            [('modify', ('f-id', 'new-content\n'))])
 
1475
        builder.build_snapshot('C', ['B'],
 
1476
            [('modify', ('f-id', 'third-content\n'))])
 
1477
        builder.build_snapshot('D', ['C'],
 
1478
            [('modify', ('f-id', 'fourth-content\n'))])
1511
1479
        b = builder.get_branch()
1512
1480
        b.lock_read()
1513
1481
        builder.finish_series()
1517
1485
        # ['D', 'C', 'B', 'A']
1518
1486
        packs = b.repository._pack_collection.packs
1519
1487
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
1520
 
                                          packs, 'testing',
1521
 
                                          revision_ids=[b'B', b'C'])
 
1488
                                  packs, 'testing',
 
1489
                                  revision_ids=['B', 'C'])
1522
1490
        # Now, when we are copying the B & C revisions, their pack files should
1523
1491
        # be moved to the front of the stack
1524
1492
        # The new ordering moves B & C to the front of the .packs attribute,
1525
1493
        # and leaves the others in the original order.
1526
1494
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
1527
 
        packer.pack()
 
1495
        new_pack = packer.pack()
1528
1496
        self.assertEqual(new_packs, packer.packs)
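The expected ordering [packs[1], packs[2], packs[0], packs[3]] is a stable partition: packs holding the revisions being copied float to the front while the rest keep their relative order. A minimal sketch of that reordering, assuming relevance is already decided:

    def move_relevant_packs_first(packs, is_relevant):
        # Stable partition: relevant packs first, original order kept
        # within each group.
        relevant = [p for p in packs if is_relevant(p)]
        rest = [p for p in packs if not is_relevant(p)]
        return relevant + rest

    packs = ['pack-A', 'pack-B', 'pack-C', 'pack-D']
    assert move_relevant_packs_first(
        packs, lambda p: p in ('pack-B', 'pack-C'),
        ) == ['pack-B', 'pack-C', 'pack-A', 'pack-D']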
1529
1497
 
1530
1498
 
1537
1505
 
1538
1506
    def test_open_pack_will_optimise(self):
1539
1507
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
1540
 
                                                    [], '.test')
 
1508
                                            [], '.test')
1541
1509
        new_pack = packer.open_pack()
1542
 
        self.addCleanup(new_pack.abort)  # ensure cleanup
 
1510
        self.addCleanup(new_pack.abort) # ensure cleanup
1543
1511
        self.assertIsInstance(new_pack, pack_repo.NewPack)
1544
1512
        self.assertTrue(new_pack.revision_index._optimize_for_size)
1545
1513
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
1552
1520
    def make_abc_branch(self):
1553
1521
        builder = self.make_branch_builder('source')
1554
1522
        builder.start_series()
1555
 
        builder.build_snapshot(None, [
1556
 
            ('add', ('', b'root-id', 'directory', None)),
1557
 
            ('add', ('file', b'file-id', 'file', b'content\n')),
1558
 
            ], revision_id=b'A')
1559
 
        builder.build_snapshot([b'A'], [
1560
 
            ('add', ('dir', b'dir-id', 'directory', None))],
1561
 
            revision_id=b'B')
1562
 
        builder.build_snapshot([b'B'], [
1563
 
            ('modify', ('file', b'new content\n'))],
1564
 
            revision_id=b'C')
 
1523
        builder.build_snapshot('A', None, [
 
1524
            ('add', ('', 'root-id', 'directory', None)),
 
1525
            ('add', ('file', 'file-id', 'file', 'content\n')),
 
1526
            ])
 
1527
        builder.build_snapshot('B', ['A'], [
 
1528
            ('add', ('dir', 'dir-id', 'directory', None))])
 
1529
        builder.build_snapshot('C', ['B'], [
 
1530
            ('modify', ('file-id', 'new content\n'))])
1565
1531
        builder.finish_series()
1566
1532
        return builder.get_branch()
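This hunk shows the updated build_snapshot calling convention: parent ids come first (None meaning the current tip), then the action list, with revision_id passed by keyword and all ids as bytes. A condensed usage sketch against an in-memory transport, assuming current breezy APIs (the two-revision history is illustrative):

    from breezy import transport
    from breezy.branchbuilder import BranchBuilder

    builder = BranchBuilder(transport.get_transport('memory:///'))
    builder.start_series()
    builder.build_snapshot(None, [
        ('add', ('', b'root-id', 'directory', None)),
        ('add', ('file', b'file-id', 'file', b'content\n'))],
        revision_id=b'A')
    builder.build_snapshot([b'A'],
                           [('modify', ('file', b'new content\n'))],
                           revision_id=b'B')
    builder.finish_series()
    branch = builder.get_branch()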
1567
1533
 
1577
1543
                  pack_name_with_rev_C_content)
1578
1544
        """
1579
1545
        b_source = self.make_abc_branch()
1580
 
        b_base = b_source.controldir.sprout(
1581
 
            'base', revision_id=b'A').open_branch()
1582
 
        b_stacked = b_base.controldir.sprout(
1583
 
            'stacked', stacked=True).open_branch()
 
1546
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
 
1547
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
1584
1548
        b_stacked.lock_write()
1585
1549
        self.addCleanup(b_stacked.unlock)
1586
 
        b_stacked.fetch(b_source, b'B')
 
1550
        b_stacked.fetch(b_source, 'B')
1587
1551
        # Now re-open the stacked repo directly (no fallbacks) so that we can
1588
1552
        # fill in the A rev.
1589
 
        repo_not_stacked = b_stacked.controldir.open_repository()
 
1553
        repo_not_stacked = b_stacked.bzrdir.open_repository()
1590
1554
        repo_not_stacked.lock_write()
1591
1555
        self.addCleanup(repo_not_stacked.unlock)
1592
1556
        # Now we should have a pack file with A's inventory, but not its
1593
1557
        # Revision
1594
 
        self.assertEqual([(b'A',), (b'B',)],
 
1558
        self.assertEqual([('A',), ('B',)],
1595
1559
                         sorted(repo_not_stacked.inventories.keys()))
1596
 
        self.assertEqual([(b'B',)],
 
1560
        self.assertEqual([('B',)],
1597
1561
                         sorted(repo_not_stacked.revisions.keys()))
1598
1562
        stacked_pack_names = repo_not_stacked._pack_collection.names()
1599
1563
        # We have a couple of names here; figure out which has A's inventory
1600
1564
        for name in stacked_pack_names:
1601
1565
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
1602
1566
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
1603
 
            if (b'A',) in keys:
 
1567
            if ('A',) in keys:
1604
1568
                inv_a_pack_name = name
1605
1569
                break
1606
1570
        else:
1607
1571
            self.fail('Could not find pack containing A\'s inventory')
1608
 
        repo_not_stacked.fetch(b_source.repository, b'A')
1609
 
        self.assertEqual([(b'A',), (b'B',)],
 
1572
        repo_not_stacked.fetch(b_source.repository, 'A')
 
1573
        self.assertEqual([('A',), ('B',)],
1610
1574
                         sorted(repo_not_stacked.revisions.keys()))
1611
1575
        new_pack_names = set(repo_not_stacked._pack_collection.names())
1612
1576
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
1613
1577
        self.assertEqual(1, len(rev_a_pack_names))
1614
1578
        rev_a_pack_name = list(rev_a_pack_names)[0]
1615
1579
        # Now fetch 'C', so we have a couple of pack files to join
1616
 
        repo_not_stacked.fetch(b_source.repository, b'C')
 
1580
        repo_not_stacked.fetch(b_source.repository, 'C')
1617
1581
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
1618
1582
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
1619
1583
        self.assertEqual(1, len(rev_c_pack_names))
1631
1595
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
1632
1596
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
1633
1597
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1634
 
                                                [a_pack, c_pack], '.test-pack')
 
1598
                    [a_pack, c_pack], '.test-pack')
1635
1599
        # This would raise ValueError in bug #437003, but should not raise an
1636
1600
        # error once fixed.
1637
1601
        packer.pack()
1644
1608
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
1645
1609
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
1646
1610
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1647
 
                                                repo._pack_collection.all_packs(), '.test-pack')
 
1611
            repo._pack_collection.all_packs(), '.test-pack')
1648
1612
        e = self.assertRaises(ValueError, packer.pack)
1649
1613
        packer.new_pack.abort()
1650
1614
        self.assertContainsRe(str(e),
1651
 
                              r"We are missing inventories for revisions: .*'A'")
 
1615
            r"We are missing inventories for revisions: .*'A'")
1652
1616
 
1653
1617
 
1654
1618
class TestCrossFormatPacks(TestCaseWithTransport):
1689
1653
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1690
1654
        source_tree.lock_write()
1691
1655
        self.addCleanup(source_tree.unlock)
1692
 
        source_tree.commit('foo')
 
1656
        tip = source_tree.commit('foo')
1693
1657
        target = self.make_repository('target', format=target_fmt)
1694
1658
        target.lock_write()
1695
1659
        self.addCleanup(target.unlock)
1744
1708
    def test_open_with_present_feature(self):
1745
1709
        self.addCleanup(
1746
1710
            bzrrepository.RepositoryFormatMetaDir.unregister_feature,
1747
 
            b"makes-cheese-sandwich")
 
1711
            "makes-cheese-sandwich")
1748
1712
        bzrrepository.RepositoryFormatMetaDir.register_feature(
1749
 
            b"makes-cheese-sandwich")
 
1713
            "makes-cheese-sandwich")
1750
1714
        repo = self.make_repository('.')
1751
1715
        repo.lock_write()
1752
 
        repo._format.features[b"makes-cheese-sandwich"] = b"required"
 
1716
        repo._format.features["makes-cheese-sandwich"] = "required"
1753
1717
        repo._format.check_support_status(False)
1754
1718
        repo.unlock()
1755
1719
 
1756
1720
    def test_open_with_missing_required_feature(self):
1757
1721
        repo = self.make_repository('.')
1758
1722
        repo.lock_write()
1759
 
        repo._format.features[b"makes-cheese-sandwich"] = b"required"
1760
 
        self.assertRaises(bzrdir.MissingFeature,
1761
 
                          repo._format.check_support_status, False)
 
1723
        repo._format.features["makes-cheese-sandwich"] = "required"
 
1724
        self.assertRaises(errors.MissingFeature,
 
1725
            repo._format.check_support_status, False)
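The two feature tests pin down a simple gate: a format's features mapping names each feature's necessity, and check_support_status raises MissingFeature for any feature marked required that was never registered with the format class. A self-contained sketch of that behaviour (names are illustrative, not breezy's internals):

    class MissingFeature(Exception):
        pass

    class FormatSketch:
        _registered_features = set()

        def __init__(self):
            self.features = {}

        @classmethod
        def register_feature(cls, name):
            cls._registered_features.add(name)

        def check_support_status(self, allow_unsupported):
            # An unregistered "required" feature makes the format unusable.
            for name, necessity in self.features.items():
                if (necessity == b"required"
                        and name not in self._registered_features):
                    raise MissingFeature(name)

    fmt = FormatSketch()
    fmt.features[b"makes-cheese-sandwich"] = b"required"
    try:
        fmt.check_support_status(False)
        raise AssertionError('expected MissingFeature')
    except MissingFeature:
        pass
    FormatSketch.register_feature(b"makes-cheese-sandwich")
    fmt.check_support_status(False)  # passes once the feature is registered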