/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2020-05-06 02:13:25 UTC
  • mfrom: (7490.7.21 work)
  • mto: This revision was merged to the branch mainline in revision 7501.
  • Revision ID: jelmer@jelmer.uk-20200506021325-awbmmqu1zyorz7sj
Merge 3.1 branch.
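
The changes below are part of the Python 3 port carried by the 3.1 branch: on-disk format strings, revision ids, file ids and raw file contents move from str to bytes literals, and long statements are rewrapped for PEP 8. A minimal sketch of the recurring format-string pattern (the class name here is illustrative, not from the diff):

    # Illustrative only: the str -> bytes pattern this diff applies to
    # repository format strings. On Python 3, what is read back from the
    # .bzr control files is bytes, so the expected marker must be bytes too.
    class SampleFormat:
        @classmethod
        def get_format_string(cls):
            return b"Sample .bzr repository format."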

=== modified file 'breezy/tests/test_repository.py'
@@ -27,7 +27,6 @@
 import breezy
 from breezy.errors import (
     UnknownFormatError,
-    UnsupportedFormatError,
     )
 from breezy import (
     tests,
@@ -73,6 +72,7 @@
         private_default = old_default().repository_format.__class__
         old_format = repository.format_registry.get_default()
         self.assertTrue(isinstance(old_format, private_default))
+
         def make_sample_bzrdir():
             my_bzrdir = bzrdir.BzrDirMetaFormat1()
             my_bzrdir.repository_format = SampleRepositoryFormat()
@@ -105,7 +105,7 @@
     @classmethod
     def get_format_string(cls):
         """See RepositoryFormat.get_format_string()."""
-        return "Sample .bzr repository format."
+        return b"Sample .bzr repository format."
 
     def initialize(self, a_controldir, shared=False):
         """Initialize a repository in a BzrDir"""
@@ -137,11 +137,12 @@
         # create a branch with a few known format objects.
         # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
+
         def check_format(format, url):
             dir = format._matchingcontroldir.initialize(url)
             format.initialize(dir)
-            t = transport.get_transport_from_path(url)
-            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
+            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
+                dir)
             self.assertIsInstance(found_format, format.__class__)
         check_format(repository.format_registry.get_default(), "bar")
 
@@ -154,11 +155,11 @@
     def test_from_string(self):
         self.assertIsInstance(
             SampleRepositoryFormat.from_string(
-                "Sample .bzr repository format."),
+                b"Sample .bzr repository format."),
             SampleRepositoryFormat)
         self.assertRaises(AssertionError,
-            SampleRepositoryFormat.from_string,
-                "Different .bzr repository format.")
+                          SampleRepositoryFormat.from_string,
+                          b"Different .bzr repository format.")
 
     def test_find_format_unknown_format(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -169,15 +170,17 @@
 
     def test_find_format_with_features(self):
         tree = self.make_branch_and_tree('.', format='2a')
-        tree.branch.repository.update_feature_flags({"name": "necessity"})
-        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(tree.controldir)
-        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
-        self.assertEqual(found_format.features.get("name"), "necessity")
-        self.assertRaises(bzrdir.MissingFeature, found_format.check_support_status,
-            True)
-        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
-            "name")
-        bzrrepository.RepositoryFormatMetaDir.register_feature("name")
+        tree.branch.repository.update_feature_flags({b"name": b"necessity"})
+        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
+            tree.controldir)
+        self.assertIsInstance(
+            found_format, bzrrepository.RepositoryFormatMetaDir)
+        self.assertEqual(found_format.features.get(b"name"), b"necessity")
+        self.assertRaises(
+            bzrdir.MissingFeature, found_format.check_support_status, True)
+        self.addCleanup(
+            bzrrepository.RepositoryFormatMetaDir.unregister_feature, b"name")
+        bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
         found_format.check_support_status(True)
 
 
@@ -190,9 +193,11 @@
     def test_register_unregister_format(self):
         format = SampleRepositoryFormat()
         self.registry.register(format)
-        self.assertEqual(format, self.registry.get("Sample .bzr repository format."))
+        self.assertEqual(format, self.registry.get(
+            b"Sample .bzr repository format."))
         self.registry.remove(format)
-        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
+        self.assertRaises(KeyError, self.registry.get,
+                          b"Sample .bzr repository format.")
 
     def test_get_all(self):
         format = SampleRepositoryFormat()
@@ -209,7 +214,7 @@
     def test_register_extra_lazy(self):
         self.assertEqual([], self.registry._get_all())
         self.registry.register_extra_lazy("breezy.tests.test_repository",
-            "SampleExtraRepositoryFormat")
+                                          "SampleExtraRepositoryFormat")
         formats = self.registry._get_all()
         self.assertEqual(1, len(formats))
         self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
@@ -219,14 +224,14 @@
 
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
@@ -242,24 +247,25 @@
         # empty revision-store directory
         # empty weaves directory
         t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
-                             t.get('format').read())
+        with t.get('format') as f:
+            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
+                                 f.read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
         # Check per-file knits.
-        branch = control.create_branch()
+        control.create_branch()
         tree = control.create_workingtree()
-        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
-        tree.put_file_bytes_non_atomic('foo', '')
-        tree.commit('1st post', rev_id='foo')
+        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
+        tree.put_file_bytes_non_atomic('foo', b'')
+        tree.commit('1st post', rev_id=b'foo')
         self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
-            '\nfoo fulltext 0 81  :')
+                           b'\nfoo fulltext 0 81  :')
 
-    def assertHasKnit(self, t, knit_name, extra_content=''):
+    def assertHasKnit(self, t, knit_name, extra_content=b''):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
+        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
                              t.get(knit_name + '.kndx').read())
 
     def check_knits(self, t):
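
The disk-layout hunks above also replace the chained t.get('format').read() with a with block, so the transport file handle is closed deterministically while the comparison moves to bytes. A sketch, assuming `t` is a breezy transport whose get() returns a file-like object usable as a context manager:

    # Sketch only; `t` is assumed to be a transport as in the tests above.
    with t.get('format') as f:
        data = f.read()  # bytes on Python 3
    assert data == b'Bazaar-NG Knit Repository Format 1'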
@@ -270,7 +276,7 @@
 
     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
         # lock: is a directory
@@ -279,11 +285,12 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
-                             t.get('format').read())
+        with t.get('format') as f:
+            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
+                                 f.read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff('', t.get('shared-storage').read())
+        self.assertEqualDiff(b'', t.get('shared-storage').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
 
@@ -290,6 +297,7 @@
     def test_shared_no_tree_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        repo = knitrepo.RepositoryFormatKnit1().initialize(
+            control, shared=True)
         repo.set_make_working_trees(False)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
@@ -299,12 +307,13 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
-                             t.get('format').read())
+        with t.get('format') as f:
+            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
+                                 f.read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff('', t.get('shared-storage').read())
-        self.assertEqualDiff('', t.get('no-working-trees').read())
+        self.assertEqualDiff(b'', t.get('shared-storage').read())
+        self.assertEqualDiff(b'', t.get('no-working-trees').read())
         repo.set_make_working_trees(True)
         self.assertFalse(t.has('no-working-trees'))
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
@@ -317,29 +326,29 @@
         the whole inventory. So we grab the one from the expected text. Which
         is valid when the api is not being abused.
         """
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
-        inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
-        self.assertEqual('test-rev-id', inv.root.revision)
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
+        inv_xml = b'<inventory format="5">\n</inventory>\n'
+        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
+        self.assertEqual(b'test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
         """If it is set, then we re-use the global revision id"""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
-        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
-                   '</inventory>\n')
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
+        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
+                   b'</inventory>\n')
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
         self.assertRaises(AssertionError, repo._deserialise_inventory,
-            'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
-        self.assertEqual('other-rev-id', inv.root.revision)
+                          b'test-rev-id', [inv_xml])
+        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
+        self.assertEqual(b'other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
-        repo = self.make_repository('.',
-            format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertFalse(repo._format.supports_external_lookups)
 
 
@@ -373,7 +382,7 @@
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
         return (isinstance(repo_source, DummyRepository) and
-            isinstance(repo_target, DummyRepository))
+                isinstance(repo_target, DummyRepository))
 
 
 class TestInterRepository(TestCaseWithTransport):
@@ -418,13 +427,17 @@
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.supports_tree_reference = (
+            repo._format.supports_tree_reference)
         dummy_a._format.rich_root_data = repo._format.rich_root_data
-        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
+        dummy_a._format.supports_full_versioned_files = (
+            repo._format.supports_full_versioned_files)
         dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.supports_tree_reference = (
+            repo._format.supports_tree_reference)
         dummy_b._format.rich_root_data = repo._format.rich_root_data
-        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
+        dummy_b._format.supports_full_versioned_files = (
+            repo._format.supports_full_versioned_files)
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -447,14 +460,14 @@
 
     @classmethod
     def get_format_string(cls):
-        return "Test Format 1"
+        return b"Test Format 1"
 
 
 class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
     @classmethod
     def get_format_string(cls):
-        return "Test Format 2"
+        return b"Test Format 2"
 
 
 class TestRepositoryConverter(TestCaseWithTransport):
@@ -464,20 +477,17 @@
         target_format = TestRepositoryFormat2()
         repository.format_registry.register(source_format)
         self.addCleanup(repository.format_registry.remove,
-            source_format)
+                        source_format)
         repository.format_registry.register(target_format)
         self.addCleanup(repository.format_registry.remove,
-            target_format)
+                        target_format)
         t = self.get_transport()
         t.mkdir('repository')
         repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
         repo = TestRepositoryFormat1().initialize(repo_dir)
         converter = repository.CopyConverter(target_format)
-        pb = breezy.ui.ui_factory.nested_progress_bar()
-        try:
+        with breezy.ui.ui_factory.nested_progress_bar() as pb:
             converter.convert(repo, pb)
-        finally:
-            pb.finished()
         repo = repo_dir.open_repository()
         self.assertTrue(isinstance(target_format, repo._format.__class__))
 
@@ -503,30 +513,23 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit1()
         tree = self.make_branch_and_tree('.', format)
-        tree.commit("Dull commit", rev_id="dull")
-        revision_tree = tree.branch.repository.revision_tree('dull')
-        revision_tree.lock_read()
-        try:
-            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
-                u'', revision_tree.get_root_id())
-        finally:
-            revision_tree.unlock()
+        tree.commit("Dull commit", rev_id=b"dull")
+        revision_tree = tree.branch.repository.revision_tree(b'dull')
+        with revision_tree.lock_read():
+            self.assertRaises(
+                errors.NoSuchFile, revision_tree.get_file_lines, u'')
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         upgrade.Convert('.', format)
         tree = workingtree.WorkingTree.open('.')
-        revision_tree = tree.branch.repository.revision_tree('dull')
-        revision_tree.lock_read()
-        try:
-            revision_tree.get_file_lines(u'', revision_tree.get_root_id())
-        finally:
-            revision_tree.unlock()
-        tree.commit("Another dull commit", rev_id='dull2')
-        revision_tree = tree.branch.repository.revision_tree('dull2')
+        revision_tree = tree.branch.repository.revision_tree(b'dull')
+        with revision_tree.lock_read():
+            revision_tree.get_file_lines(u'')
+        tree.commit("Another dull commit", rev_id=b'dull2')
+        revision_tree = tree.branch.repository.revision_tree(b'dull2')
         revision_tree.lock_read()
         self.addCleanup(revision_tree.unlock)
-        self.assertEqual('dull',
-                revision_tree.get_file_revision(u'', revision_tree.get_root_id()))
+        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))
 
     def test_supports_external_lookups(self):
         format = bzrdir.BzrDirMetaFormat1()
@@ -541,7 +544,7 @@
         mt = self.make_branch_and_memory_tree('test', format='2a')
         mt.lock_write()
         self.addCleanup(mt.unlock)
-        mt.add([''], ['root-id'])
+        mt.add([''], [b'root-id'])
         mt.commit('first')
         index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
         self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
@@ -556,70 +559,70 @@
         builder = self.make_branch_builder('source', format='2a')
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
         # The index, and what to read off disk, should be the same for both
         # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
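
Note on the hunk above: the class defines test_fetch_combines_groups three times, and the port converts every copy to bytes rather than deduplicating them. In a Python class body a later def simply rebinds the same attribute, so only the last definition is ever collected and run, as this small sketch (illustrative names only) shows:

    # Later definitions shadow earlier ones within a class body.
    class Demo:
        def probe(self):
            return 1

        def probe(self):  # rebinds Demo.probe; the first version is lost
            return 2

    assert Demo().probe() == 2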
@@ -631,7 +634,7 @@
     def test_inventories_use_chk_map_with_parent_base_dict(self):
         tree = self.make_branch_and_memory_tree('repo', format="2a")
         tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
+        tree.add([''], [b'TREE_ROOT'])
         revid = tree.commit("foo")
         tree.unlock()
         tree.lock_read()
@@ -641,8 +644,8 @@
         inv.parent_id_basename_to_file_id._ensure_root()
         inv.id_to_entry._ensure_root()
         self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
-        self.assertEqual(65536,
-            inv.parent_id_basename_to_file_id._root_node.maximum_size)
+        self.assertEqual(
+            65536, inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
     def test_autopack_unchanged_chk_nodes(self):
         # at 20 unchanged commits, chk pages are packed that are split into
@@ -652,7 +655,7 @@
         tree = self.make_branch_and_memory_tree('tree', format='2a')
         tree.lock_write()
         self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree.add([''], [b'TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
@@ -660,7 +663,7 @@
         tree = self.make_branch_and_memory_tree('tree', format='2a')
         tree.lock_write()
         self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree.add([''], [b'TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -693,25 +696,25 @@
 
     def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
         source_builder = self.make_branch_builder('source',
-                            format='2a')
+                                                  format='2a')
         # We have to build a fairly large tree, so that we are sure the chk
         # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        entries = [('add', ('', b'a-root-id', 'directory', None))]
         for i in 'abcdefghijklmnopqrstuvwxyz123456789':
             for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                 fname = i + j
-                fid = fname + '-id'
-                content = 'content for %s\n' % (fname,)
+                fid = fname.encode('utf-8') + b'-id'
+                content = b'content for %s\n' % (fname.encode('utf-8'),)
                 entries.append(('add', (fname, fid, 'file', content)))
         source_builder.start_series()
-        source_builder.build_snapshot(None, entries, revision_id='rev-1')
+        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
         # Now change a few of them, so we get a few new pages for the second
         # revision
-        source_builder.build_snapshot(['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ], revision_id='rev-2')
+        source_builder.build_snapshot([b'rev-1'], [
+            ('modify', ('aa', b'new content for aa-id\n')),
+            ('modify', ('cc', b'new content for cc-id\n')),
+            ('modify', ('zz', b'new content for zz-id\n')),
+            ], revision_id=b'rev-2')
         source_builder.finish_series()
         source_branch = source_builder.get_branch()
         source_branch.lock_read()
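
The hunk above relies on %-interpolation for bytes, which only exists on Python 3.5 and later (PEP 461); the %s argument must itself be bytes. A short sketch of that pattern:

    # PEP 461: %-formatting on bytes requires bytes arguments for %s.
    fname = 'ab'
    fid = fname.encode('utf-8') + b'-id'
    content = b'content for %s\n' % (fname.encode('utf-8'),)
    assert (fid, content) == (b'ab-id', b'content for ab\n')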
@@ -722,33 +725,33 @@
 
         # On a regular pass, getting the inventories and chk pages for rev-2
         # would only get the newly created chk pages
-        search = vf_search.SearchResult({'rev-2'}, {'rev-1'}, 1,
-                                    {'rev-2'})
-        simple_chk_records = []
+        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
+                                        {b'rev-2'})
+        simple_chk_records = set()
         for vf_name, substream in source.get_stream(search):
             if vf_name == 'chk_bytes':
                 for record in substream:
-                    simple_chk_records.append(record.key)
+                    simple_chk_records.add(record.key)
             else:
                 for _ in substream:
                     continue
         # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
+        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
+                         set(simple_chk_records))
         # Now, when we do a similar call using 'get_stream_for_missing_keys'
         # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
+        missing = [('inventories', b'rev-2')]
+        full_chk_records = set()
         for vf_name, substream in source.get_stream_for_missing_keys(missing):
             if vf_name == 'inventories':
                 for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
+                    self.assertEqual((b'rev-2',), record.key)
             elif vf_name == 'chk_bytes':
                 for record in substream:
-                    full_chk_records.append(record.key)
+                    full_chk_records.add(record.key)
             else:
                 self.fail('Should not be getting a stream of %s' % (vf_name,))
         # We have 257 records now. This is because we have 1 root page, and 256
@@ -771,25 +774,29 @@
         source = self.make_repository('source', format='pack-0.92')
         target = self.make_repository('target', format='pack-0.92')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_rich_root_pack(self):
         source = self.make_repository('source', format='rich-root-pack')
         target = self.make_repository('target', format='rich-root-pack')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19(self):
         source = self.make_repository('source', format='1.9')
         target = self.make_repository('target', format='1.9')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19_rich_root(self):
         source = self.make_repository('source', format='1.9-rich-root')
         target = self.make_repository('target', format='1.9-rich-root')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_remote_exact_pack_19(self):
         trans = self.make_smart_server('target')
@@ -798,7 +805,8 @@
         target = self.make_repository('target', format='1.9')
         target = repository.Repository.open(trans.base)
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_stream_source_to_non_exact(self):
         source = self.make_repository('source', format='pack-0.92')
@@ -835,57 +843,60 @@
         super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
         self.builder = self.make_branch_builder('source')
         self.builder.start_series()
-        self.builder.build_snapshot(None,
-            [('add', ('', 'tree-root', 'directory', None))],
-            revision_id='initial')
+        self.builder.build_snapshot(
+            None,
+            [('add', ('', b'tree-root', 'directory', None))],
+            revision_id=b'initial')
         self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)
 
     def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(sorted(expected_result),
+        self.assertEqual(
+            sorted(expected_result),
             sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
     def test_simple(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2')
-        rev_set = ['revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
+        rev_set = [b'revid2']
+        self.assertParentIds([b'revid1'], rev_set)
 
     def test_not_first_parent(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2')
-        self.builder.build_snapshot(['revid2'], [], revision_id='revid3')
-        rev_set = ['revid3', 'revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
+        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
+        rev_set = [b'revid3', b'revid2']
+        self.assertParentIds([b'revid1'], rev_set)
 
     def test_not_null(self):
-        rev_set = ['initial']
+        rev_set = [b'initial']
         self.assertParentIds([], rev_set)
 
     def test_not_null_set(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
         rev_set = [_mod_revision.NULL_REVISION]
         self.assertParentIds([], rev_set)
 
     def test_ghost(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        rev_set = ['ghost', 'revid1']
-        self.assertParentIds(['initial'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        rev_set = [b'ghost', b'revid1']
+        self.assertParentIds([b'initial'], rev_set)
 
     def test_ghost_parent(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1', 'ghost'], [], revision_id='revid2')
-        rev_set = ['revid2', 'revid1']
-        self.assertParentIds(['ghost', 'initial'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot(
+            [b'revid1', b'ghost'], [], revision_id=b'revid2')
+        rev_set = [b'revid2', b'revid1']
+        self.assertParentIds([b'ghost', b'initial'], rev_set)
 
     def test_righthand_parent(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2a')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2b')
-        self.builder.build_snapshot(['revid2a', 'revid2b'], [],
-                                    revision_id='revid3')
-        rev_set = ['revid3', 'revid2a']
-        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
+        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
+                                    revision_id=b'revid3')
+        rev_set = [b'revid3', b'revid2a']
+        self.assertParentIds([b'revid1', b'revid2b'], rev_set)
 
 
 class TestWithBrokenRepo(TestCaseWithTransport):
@@ -903,42 +914,43 @@
             repo.start_write_group()
             cleanups.append(repo.commit_write_group)
             # make rev1a: A well-formed revision, containing 'file1'
-            inv = inventory.Inventory(revision_id='rev1a')
-            inv.root.revision = 'rev1a'
-            self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
-            repo.add_inventory('rev1a', inv, [])
-            revision = _mod_revision.Revision('rev1a',
+            inv = inventory.Inventory(revision_id=b'rev1a')
+            inv.root.revision = b'rev1a'
+            self.add_file(repo, inv, 'file1', b'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
+            repo.add_inventory(b'rev1a', inv, [])
+            revision = _mod_revision.Revision(
+                b'rev1a',
                 committer='jrandom@example.com', timestamp=0,
                 inventory_sha1='', timezone=0, message='foo', parent_ids=[])
-            repo.add_revision('rev1a', revision, inv)
+            repo.add_revision(b'rev1a', revision, inv)
 
             # make rev1b, which has no Revision, but has an Inventory, and
             # file1
-            inv = inventory.Inventory(revision_id='rev1b')
-            inv.root.revision = 'rev1b'
-            self.add_file(repo, inv, 'file1', 'rev1b', [])
-            repo.add_inventory('rev1b', inv, [])
+            inv = inventory.Inventory(revision_id=b'rev1b')
+            inv.root.revision = b'rev1b'
+            self.add_file(repo, inv, 'file1', b'rev1b', [])
+            repo.add_inventory(b'rev1b', inv, [])
 
             # make rev2, with file1 and file2
             # file2 is sane
             # file1 has 'rev1b' as an ancestor, even though this is not
             # mentioned by 'rev1a', making it an unreferenced ancestor
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
-            self.add_file(repo, inv, 'file2', 'rev2', [])
-            self.add_revision(repo, 'rev2', inv, ['rev1a'])
+            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
+            self.add_file(repo, inv, 'file2', b'rev2', [])
+            self.add_revision(repo, b'rev2', inv, [b'rev1a'])
 
             # make ghost revision rev1c
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev1c', [])
+            self.add_file(repo, inv, 'file2', b'rev1c', [])
 
             # make rev3 with file2
             # file2 refers to 'rev1c', which is a ghost in this repository, so
             # file2 cannot have rev1c as its ancestor.
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
-            self.add_revision(repo, 'rev3', inv, ['rev1c'])
+            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
+            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
             return repo
         finally:
             for cleanup in reversed(cleanups):
@@ -949,20 +961,23 @@
         inv.root.revision = revision_id
         repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
-        revision = _mod_revision.Revision(revision_id,
+        revision = _mod_revision.Revision(
+            revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
             timezone=0, message='foo', parent_ids=parent_ids)
         repo.add_revision(revision_id, revision, inv)
 
     def add_file(self, repo, inv, filename, revision, parents):
-        file_id = filename + '-id'
-        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
+        file_id = filename.encode('utf-8') + b'-id'
+        content = [b'line\n']
+        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
         entry.revision = revision
+        entry.text_sha1 = osutils.sha_strings(content)
         entry.text_size = 0
         inv.add(entry)
         text_key = (file_id, revision)
         parent_keys = [(file_id, parent) for parent in parents]
-        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
+        repo.texts.add_lines(text_key, parent_keys, content)
 
     def test_insert_from_broken_repo(self):
         """Inserting a data stream from a broken repository won't silently
@@ -979,8 +994,8 @@
         empty_repo.lock_read()
         self.addCleanup(empty_repo.unlock)
         text = next(empty_repo.texts.get_record_stream(
-            [('file2-id', 'rev3')], 'topological', True))
-        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
+            [(b'file2-id', b'rev3')], 'topological', True))
+        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1014,11 +1029,11 @@
     def test__clear_obsolete_packs(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
         res = packs._clear_obsolete_packs()
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual([], obsolete_pack_trans.list_dir('.'))
@@ -1026,11 +1041,11 @@
     def test__clear_obsolete_packs_preserve(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
         res = packs._clear_obsolete_packs(preserve={'a-pack'})
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
@@ -1064,7 +1079,7 @@
     def test_repr(self):
         packs = self.get_packs()
         self.assertContainsRe(repr(packs),
-            'RepositoryPackCollection(.*Repository(.*))')
+                              'RepositoryPackCollection(.*Repository(.*))')
 
     def test__obsolete_packs(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1085,8 +1100,8 @@
                          sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-            sorted({osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')}))
+                         sorted({osutils.splitext(n)[0] for n in
+                                 packs._index_transport.list_dir('.')}))
 
     def test__obsolete_packs_missing_directory(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1102,8 +1117,8 @@
                          sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-            sorted({osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')}))
+                         sorted({osutils.splitext(n)[0] for n in
+                                 packs._index_transport.list_dir('.')}))
 
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
@@ -1117,23 +1132,23 @@
     def test_pack_distribution_one_to_nine(self):
         packs = self.get_packs()
         self.assertEqual([1],
-            packs.pack_distribution(1))
+                         packs.pack_distribution(1))
         self.assertEqual([1, 1],
-            packs.pack_distribution(2))
+                         packs.pack_distribution(2))
         self.assertEqual([1, 1, 1],
-            packs.pack_distribution(3))
+                         packs.pack_distribution(3))
         self.assertEqual([1, 1, 1, 1],
-            packs.pack_distribution(4))
+                         packs.pack_distribution(4))
         self.assertEqual([1, 1, 1, 1, 1],
-            packs.pack_distribution(5))
+                         packs.pack_distribution(5))
         self.assertEqual([1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(6))
+                         packs.pack_distribution(6))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(7))
+                         packs.pack_distribution(7))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(8))
+                         packs.pack_distribution(8))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(9))
+                         packs.pack_distribution(9))
 
     def test_pack_distribution_stable_at_boundaries(self):
         """When there are multi-rev packs the counts are stable."""
@@ -1170,7 +1185,7 @@
     def test_plan_pack_operations_2010_combines_smallest_two(self):
         packs = self.get_packs()
         existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
-            (1, "single1")]
+                          (1, "single1")]
         # rev count - 2010 -> 2x1000 + 1x10 (3)
         pack_operations = packs.plan_autopack_combinations(
             existing_packs, [1000, 1000, 10])
@@ -1243,8 +1258,10 @@
         inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
         txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
         sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
-        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
-            name, rev_index, inv_index, txt_index, sig_index), pack_1)
+        self.assertEqual(
+            pack_repo.ExistingPack(
+                packs._pack_transport, name, rev_index, inv_index, txt_index,
+                sig_index), pack_1)
         # and the same instance should be returned on successive calls.
         self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
@@ -1262,7 +1279,7 @@
         self.assertTrue(packs.reload_pack_names())
         self.assertEqual(new_names, packs.names())
         # And the repository can access the new revision
-        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
+        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
         self.assertFalse(packs.reload_pack_names())
 
     def test_reload_pack_names_added_and_removed(self):
@@ -1275,7 +1292,7 @@
         self.assertEqual(names, packs.names())
         self.assertTrue(packs.reload_pack_names())
         self.assertEqual(new_names, packs.names())
-        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
+        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
     def test_reload_pack_names_preserves_pending(self):
@@ -1289,7 +1306,7 @@
         r.start_write_group()
         self.addCleanup(r.abort_write_group)
         r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('text', 'rev'), (), None, 'content\n')])
+            (b'text', b'rev'), (), None, b'content\n')])
         new_pack = packs._new_pack
         self.assertTrue(new_pack.data_inserted())
         new_pack.finish()
@@ -1299,23 +1316,23 @@
         packs._remove_pack_from_memory(removed_pack)
         names = packs.names()
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0][0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        new_names = {x[0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
+                         sorted([x[0] for x in deleted_nodes]))
         packs.reload_pack_names()
         reloaded_names = packs.names()
         self.assertEqual(orig_at_load, packs._packs_at_load)
         self.assertEqual(names, reloaded_names)
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0][0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        new_names = {x[0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
+                         sorted([x[0] for x in deleted_nodes]))
 
     def test_autopack_obsoletes_new_pack(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1340
        packs.pack_distribution = lambda x: [10]
1324
1341
        r.start_write_group()
1325
1342
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1326
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1343
            (b'bogus-rev',), (), None, b'bogus-content\n')])
1327
1344
        # This should trigger an autopack, which will combine everything into a
1328
1345
        # single pack file.
1329
 
        new_names = r.commit_write_group()
 
1346
        r.commit_write_group()
1330
1347
        names = packs.names()
1331
1348
        self.assertEqual(1, len(names))
1332
1349
        self.assertEqual([names[0] + '.pack'],
1338
1355
        # full-pack via the other repo which will cause us to re-evaluate and
1339
1356
        # decide we don't need to do anything
1340
1357
        orig_execute = packs._execute_pack_operations
 
1358
 
1341
1359
        def _munged_execute_pack_ops(*args, **kwargs):
1342
1360
            tree.branch.repository.pack()
1343
1361
            return orig_execute(*args, **kwargs)
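The test injects a race by swapping _execute_pack_operations for a wrapper that lets the competing repository finish a full pack first. The monkey-patch pattern in isolation (names here are illustrative, not Breezy API):

    def make_racing_wrapper(original, competing_action):
        """Return a wrapper that runs competing_action before delegating."""
        def wrapper(*args, **kwargs):
            competing_action()  # e.g. the other repository's full pack()
            return original(*args, **kwargs)
        return wrapper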
1456 1474               index_class=BTreeGraphIndex,
1457 1475               use_chk_index=False)
1458 1476           pack = pack_repo.NewPack(collection)
1459      -         self.addCleanup(pack.abort) # Make sure the write stream gets closed
     1477 +         self.addCleanup(pack.abort)  # Make sure the write stream gets closed
1460 1478           self.assertIsInstance(pack.revision_index, BTreeBuilder)
1461 1479           self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1462 1480           self.assertIsInstance(pack._hash, type(osutils.md5()))
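The last assertion spells the expected type as type(osutils.md5()) because md5() returns a hash object, not a class, so the type must be taken from a throwaway instance. Assuming osutils.md5 behaves like hashlib.md5, the idiom reduces to:

    import hashlib

    # The hash type is implementation-private, so it is recovered from
    # a freshly constructed hash object.
    assert isinstance(hashlib.md5(), type(hashlib.md5()))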
1476 1494           builder = self.make_branch_builder('.', format="1.9")
1477 1495           builder.start_series()
1478 1496           builder.build_snapshot(None, [
1479      -             ('add', ('', 'root-id', 'directory', None)),
1480      -             ('add', ('f', 'f-id', 'file', 'content\n'))],
1481      -             revision_id='A')
1482      -         builder.build_snapshot(['A'],
1483      -             [('modify', ('f-id', 'new-content\n'))],
1484      -             revision_id='B')
1485      -         builder.build_snapshot(['B'],
1486      -             [('modify', ('f-id', 'third-content\n'))],
1487      -             revision_id='C')
1488      -         builder.build_snapshot(['C'],
1489      -             [('modify', ('f-id', 'fourth-content\n'))],
1490      -             revision_id='D')
     1497 +             ('add', ('', b'root-id', 'directory', None)),
     1498 +             ('add', ('f', b'f-id', 'file', b'content\n'))],
     1499 +             revision_id=b'A')
     1500 +         builder.build_snapshot([b'A'],
     1501 +                                [('modify', ('f', b'new-content\n'))],
     1502 +                                revision_id=b'B')
     1503 +         builder.build_snapshot([b'B'],
     1504 +                                [('modify', ('f', b'third-content\n'))],
     1505 +                                revision_id=b'C')
     1506 +         builder.build_snapshot([b'C'],
     1507 +                                [('modify', ('f', b'fourth-content\n'))],
     1508 +                                revision_id=b'D')
1491 1509           b = builder.get_branch()
1492 1510           b.lock_read()
1493 1511           builder.finish_series()
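Two separate shifts are visible in the migrated build_snapshot calls: revision and file ids become bytes, and the 'modify' action is now keyed by path rather than by file id. Condensed from the hunk above (values illustrative):

    # old: builder.build_snapshot(['A'], [('modify', ('f-id', 'new-content\n'))],
    #                             revision_id='B')
    # new: builder.build_snapshot([b'A'], [('modify', ('f', b'new-content\n'))],
    #                             revision_id=b'B')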
1497 1515           # ['D', 'C', 'B', 'A']
1498 1516           packs = b.repository._pack_collection.packs
1499 1517           packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
1500      -                                   packs, 'testing',
1501      -                                   revision_ids=['B', 'C'])
     1518 +                                           packs, 'testing',
     1519 +                                           revision_ids=[b'B', b'C'])
1502 1520           # Now, when we are copying the B & C revisions, their pack files should
1503 1521           # be moved to the front of the stack
1504 1522           # The new ordering moves B & C to the front of the .packs attribute,
1505 1523           # and leaves the others in the original order.
1506 1524           new_packs = [packs[1], packs[2], packs[0], packs[3]]
1507      -         new_pack = packer.pack()
     1525 +         packer.pack()
1508 1526           self.assertEqual(new_packs, packer.packs)
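The assertion checks an ordering policy: packs holding the requested revisions move to the front of the list while the rest keep their relative order. A standalone sketch of that policy (not KnitPacker's actual implementation):

    def front_load(packs, preferred):
        """Move members of preferred to the front, preserving relative order."""
        pref = [p for p in packs if p in preferred]
        rest = [p for p in packs if p not in preferred]
        return pref + rest

    # With four packs where the middle two hold revisions B and C:
    assert front_load(['p0', 'p1', 'p2', 'p3'], {'p1', 'p2'}) == \
        ['p1', 'p2', 'p0', 'p3']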
1509 1527
1510 1528
1517 1535
1518 1536       def test_open_pack_will_optimise(self):
1519 1537           packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
1520      -                                             [], '.test')
     1538 +                                                     [], '.test')
1521 1539           new_pack = packer.open_pack()
1522      -         self.addCleanup(new_pack.abort) # ensure cleanup
     1540 +         self.addCleanup(new_pack.abort)  # ensure cleanup
1523 1541           self.assertIsInstance(new_pack, pack_repo.NewPack)
1524 1542           self.assertTrue(new_pack.revision_index._optimize_for_size)
1525 1543           self.assertTrue(new_pack.inventory_index._optimize_for_size)
1533 1551           builder = self.make_branch_builder('source')
1534 1552           builder.start_series()
1535 1553           builder.build_snapshot(None, [
1536      -             ('add', ('', 'root-id', 'directory', None)),
1537      -             ('add', ('file', 'file-id', 'file', 'content\n')),
1538      -             ], revision_id='A')
1539      -         builder.build_snapshot(['A'], [
1540      -             ('add', ('dir', 'dir-id', 'directory', None))],
1541      -             revision_id='B')
1542      -         builder.build_snapshot(['B'], [
1543      -             ('modify', ('file-id', 'new content\n'))],
1544      -             revision_id='C')
     1554 +             ('add', ('', b'root-id', 'directory', None)),
     1555 +             ('add', ('file', b'file-id', 'file', b'content\n')),
     1556 +             ], revision_id=b'A')
     1557 +         builder.build_snapshot([b'A'], [
     1558 +             ('add', ('dir', b'dir-id', 'directory', None))],
     1559 +             revision_id=b'B')
     1560 +         builder.build_snapshot([b'B'], [
     1561 +             ('modify', ('file', b'new content\n'))],
     1562 +             revision_id=b'C')
1545 1563           builder.finish_series()
1546 1564           return builder.get_branch()
1547 1565
1557 1575                     pack_name_with_rev_C_content)
1558 1576           """
1559 1577           b_source = self.make_abc_branch()
1560      -         b_base = b_source.controldir.sprout('base', revision_id='A').open_branch()
1561      -         b_stacked = b_base.controldir.sprout('stacked', stacked=True).open_branch()
     1578 +         b_base = b_source.controldir.sprout(
     1579 +             'base', revision_id=b'A').open_branch()
     1580 +         b_stacked = b_base.controldir.sprout(
     1581 +             'stacked', stacked=True).open_branch()
1562 1582           b_stacked.lock_write()
1563 1583           self.addCleanup(b_stacked.unlock)
1564      -         b_stacked.fetch(b_source, 'B')
     1584 +         b_stacked.fetch(b_source, b'B')
1565 1585           # Now re-open the stacked repo directly (no fallbacks) so that we can
1566 1586           # fill in the A rev.
1567 1587           repo_not_stacked = b_stacked.controldir.open_repository()
1569 1589           self.addCleanup(repo_not_stacked.unlock)
1570 1590           # Now we should have a pack file with A's inventory, but not its
1571 1591           # Revision
1572      -         self.assertEqual([('A',), ('B',)],
     1592 +         self.assertEqual([(b'A',), (b'B',)],
1573 1593                            sorted(repo_not_stacked.inventories.keys()))
1574      -         self.assertEqual([('B',)],
     1594 +         self.assertEqual([(b'B',)],
1575 1595                            sorted(repo_not_stacked.revisions.keys()))
1576 1596           stacked_pack_names = repo_not_stacked._pack_collection.names()
1577 1597           # We have a couple names here, figure out which has A's inventory
1578 1598           for name in stacked_pack_names:
1579 1599               pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
1580 1600               keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
1581      -             if ('A',) in keys:
     1601 +             if (b'A',) in keys:
1582 1602                   inv_a_pack_name = name
1583 1603                   break
1584 1604           else:
1585 1605               self.fail('Could not find pack containing A\'s inventory')
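The loop above leans on Python's for/else: the else suite runs only when the loop finishes without hitting break, making it a natural "not found" branch. In miniature:

    def find_first(items, wanted):
        for item in items:
            if item == wanted:
                break
        else:
            raise LookupError('not found')
        return item

    assert find_first([b'x', b'A'], b'A') == b'A'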
1586      -         repo_not_stacked.fetch(b_source.repository, 'A')
1587      -         self.assertEqual([('A',), ('B',)],
     1606 +         repo_not_stacked.fetch(b_source.repository, b'A')
     1607 +         self.assertEqual([(b'A',), (b'B',)],
1588 1608                            sorted(repo_not_stacked.revisions.keys()))
1589 1609           new_pack_names = set(repo_not_stacked._pack_collection.names())
1590 1610           rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
1591 1611           self.assertEqual(1, len(rev_a_pack_names))
1592 1612           rev_a_pack_name = list(rev_a_pack_names)[0]
1593 1613           # Now fetch 'C', so we have a couple pack files to join
1594      -         repo_not_stacked.fetch(b_source.repository, 'C')
     1614 +         repo_not_stacked.fetch(b_source.repository, b'C')
1595 1615           rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
1596 1616           rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
1597 1617           self.assertEqual(1, len(rev_c_pack_names))
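Each fetch is expected to create exactly one new pack, which the test isolates by taking the set difference of the collection's names before and after. The same bookkeeping in miniature (pack names illustrative):

    before = {'pack-1', 'pack-2'}
    after = before | {'pack-3'}   # state after one fetch

    (new_name,) = after - before  # tuple unpacking enforces "exactly one"
    assert new_name == 'pack-3'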
1609 1629           a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
1610 1630           c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
1611 1631           packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1612      -                     [a_pack, c_pack], '.test-pack')
     1632 +                                                 [a_pack, c_pack], '.test-pack')
1613 1633           # This would raise ValueError in bug #437003, but should not raise an
1614 1634           # error once fixed.
1615 1635           packer.pack()
1622 1642           inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
1623 1643           repo._pack_collection._remove_pack_from_memory(inv_a_pack)
1624 1644           packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1625      -             repo._pack_collection.all_packs(), '.test-pack')
     1645 +                                                 repo._pack_collection.all_packs(), '.test-pack')
1626 1646           e = self.assertRaises(ValueError, packer.pack)
1627 1647           packer.new_pack.abort()
1628 1648           self.assertContainsRe(str(e),
1629      -             r"We are missing inventories for revisions: .*'A'")
     1649 +                               r"We are missing inventories for revisions: .*'A'")
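e = self.assertRaises(ValueError, packer.pack) captures the raised exception so its message can be matched against a regex. Plain unittest offers the same capture through the context-manager form; a self-contained equivalent (error text illustrative):

    import unittest

    class Demo(unittest.TestCase):
        def test_message(self):
            with self.assertRaises(ValueError) as cm:
                raise ValueError(
                    "We are missing inventories for revisions: [b'A']")
            self.assertRegex(str(cm.exception),
                             r"missing inventories for revisions: .*'A'")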
1630 1650
1631 1651
1632 1652   class TestCrossFormatPacks(TestCaseWithTransport):
1667 1687           source_tree = self.make_branch_and_tree('src', format=src_fmt)
1668 1688           source_tree.lock_write()
1669 1689           self.addCleanup(source_tree.unlock)
1670      -         tip = source_tree.commit('foo')
     1690 +         source_tree.commit('foo')
1671 1691           target = self.make_repository('target', format=target_fmt)
1672 1692           target.lock_write()
1673 1693           self.addCleanup(target.unlock)
1722 1742       def test_open_with_present_feature(self):
1723 1743           self.addCleanup(
1724 1744               bzrrepository.RepositoryFormatMetaDir.unregister_feature,
1725      -             "makes-cheese-sandwich")
     1745 +             b"makes-cheese-sandwich")
1726 1746           bzrrepository.RepositoryFormatMetaDir.register_feature(
1727      -             "makes-cheese-sandwich")
     1747 +             b"makes-cheese-sandwich")
1728 1748           repo = self.make_repository('.')
1729 1749           repo.lock_write()
1730      -         repo._format.features["makes-cheese-sandwich"] = "required"
     1750 +         repo._format.features[b"makes-cheese-sandwich"] = b"required"
1731 1751           repo._format.check_support_status(False)
1732 1752           repo.unlock()
1733 1753
1734 1754       def test_open_with_missing_required_feature(self):
1735 1755           repo = self.make_repository('.')
1736 1756           repo.lock_write()
1737      -         repo._format.features["makes-cheese-sandwich"] = "required"
     1757 +         repo._format.features[b"makes-cheese-sandwich"] = b"required"
1738 1758           self.assertRaises(bzrdir.MissingFeature,
1739      -             repo._format.check_support_status, False)
     1759 +                           repo._format.check_support_status, False)
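Taken together, the two tests fix the feature-checking contract: a feature marked b"required" passes check_support_status() only if it has been registered, and raises MissingFeature otherwise. A minimal model of that contract (hypothetical classes standing in for the real format machinery):

    class MissingFeature(Exception):
        pass

    class FormatWithFeatures:
        _registered = set()

        def __init__(self):
            self.features = {}  # feature name -> b"required" / b"optional"

        @classmethod
        def register_feature(cls, name):
            cls._registered.add(name)

        def check_support_status(self, allow_unsupported):
            for name, necessity in self.features.items():
                if necessity == b"required" and name not in self._registered:
                    raise MissingFeature(name)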