/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2018-05-06 11:48:54 UTC
  • mto: This revision was merged to the branch mainline in revision 6960.
  • Revision ID: jelmer@jelmer.uk-20180506114854-h4qd9ojaqy8wxjsd
Move .mailmap to root.

=== modified file 'breezy/tests/test_repository.py'
--- breezy/tests/test_repository.py
+++ breezy/tests/test_repository.py
@@ -27,6 +27,7 @@
 import breezy
 from breezy.errors import (
     UnknownFormatError,
+    UnsupportedFormatError,
     )
 from breezy import (
     tests,
@@ -72,7 +73,6 @@
         private_default = old_default().repository_format.__class__
         old_format = repository.format_registry.get_default()
         self.assertTrue(isinstance(old_format, private_default))
-
         def make_sample_bzrdir():
             my_bzrdir = bzrdir.BzrDirMetaFormat1()
             my_bzrdir.repository_format = SampleRepositoryFormat()
@@ -137,12 +137,11 @@
         # create a branch with a few known format objects.
         # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
-
         def check_format(format, url):
             dir = format._matchingcontroldir.initialize(url)
             format.initialize(dir)
-            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
-                dir)
+            t = transport.get_transport_from_path(url)
+            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
             self.assertIsInstance(found_format, format.__class__)
         check_format(repository.format_registry.get_default(), "bar")
 
@@ -158,8 +157,8 @@
                 b"Sample .bzr repository format."),
             SampleRepositoryFormat)
         self.assertRaises(AssertionError,
-                          SampleRepositoryFormat.from_string,
-                          b"Different .bzr repository format.")
+            SampleRepositoryFormat.from_string,
+                b"Different .bzr repository format.")
 
     def test_find_format_unknown_format(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -171,15 +170,13 @@
     def test_find_format_with_features(self):
         tree = self.make_branch_and_tree('.', format='2a')
         tree.branch.repository.update_feature_flags({b"name": b"necessity"})
-        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
-            tree.controldir)
-        self.assertIsInstance(
-            found_format, bzrrepository.RepositoryFormatMetaDir)
+        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(tree.controldir)
+        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
         self.assertEqual(found_format.features.get(b"name"), b"necessity")
-        self.assertRaises(
-            bzrdir.MissingFeature, found_format.check_support_status, True)
-        self.addCleanup(
-            bzrrepository.RepositoryFormatMetaDir.unregister_feature, b"name")
+        self.assertRaises(bzrdir.MissingFeature, found_format.check_support_status,
+            True)
+        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
+            b"name")
         bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
         found_format.check_support_status(True)
 
@@ -193,11 +190,9 @@
     def test_register_unregister_format(self):
         format = SampleRepositoryFormat()
         self.registry.register(format)
-        self.assertEqual(format, self.registry.get(
-            b"Sample .bzr repository format."))
+        self.assertEqual(format, self.registry.get(b"Sample .bzr repository format."))
         self.registry.remove(format)
-        self.assertRaises(KeyError, self.registry.get,
-                          b"Sample .bzr repository format.")
+        self.assertRaises(KeyError, self.registry.get, b"Sample .bzr repository format.")
 
     def test_get_all(self):
         format = SampleRepositoryFormat()
@@ -214,7 +209,7 @@
     def test_register_extra_lazy(self):
         self.assertEqual([], self.registry._get_all())
         self.registry.register_extra_lazy("breezy.tests.test_repository",
-                                          "SampleExtraRepositoryFormat")
+            "SampleExtraRepositoryFormat")
         formats = self.registry._get_all()
         self.assertEqual(1, len(formats))
         self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
@@ -224,14 +219,14 @@
 
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
         self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
         self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
@@ -247,25 +242,24 @@
         # empty revision-store directory
         # empty weaves directory
         t = control.get_repository_transport(None)
-        with t.get('format') as f:
-            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
-                                 f.read())
+        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
+                             t.get('format').read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
         # Check per-file knits.
-        control.create_branch()
+        branch = control.create_branch()
         tree = control.create_workingtree()
-        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
-        tree.put_file_bytes_non_atomic('foo', b'')
+        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
+        tree.put_file_bytes_non_atomic('foo', '')
         tree.commit('1st post', rev_id=b'foo')
         self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
-                           b'\nfoo fulltext 0 81  :')
+            '\nfoo fulltext 0 81  :')
 
-    def assertHasKnit(self, t, knit_name, extra_content=b''):
+    def assertHasKnit(self, t, knit_name, extra_content=''):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
+        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                              t.get(knit_name + '.kndx').read())
 
     def check_knits(self, t):
@@ -276,7 +270,7 @@
 
     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
         # lock: is a directory
@@ -285,19 +279,17 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        with t.get('format') as f:
-            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
-                                 f.read())
+        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
+                             t.get('format').read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff(b'', t.get('shared-storage').read())
+        self.assertEqualDiff('', t.get('shared-storage').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
 
     def test_shared_no_tree_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(
-            control, shared=True)
+        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
         repo.set_make_working_trees(False)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
@@ -307,13 +299,12 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        with t.get('format') as f:
-            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
-                                 f.read())
+        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
+                             t.get('format').read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff(b'', t.get('shared-storage').read())
-        self.assertEqualDiff(b'', t.get('no-working-trees').read())
+        self.assertEqualDiff('', t.get('shared-storage').read())
+        self.assertEqualDiff('', t.get('no-working-trees').read())
         repo.set_make_working_trees(True)
         self.assertFalse(t.has('no-working-trees'))
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
@@ -326,29 +317,29 @@
         the whole inventory. So we grab the one from the expected text. Which
         is valid when the api is not being abused.
         """
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
-        inv_xml = b'<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
-        self.assertEqual(b'test-rev-id', inv.root.revision)
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
+        inv_xml = '<inventory format="5">\n</inventory>\n'
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
         """If it is set, then we re-use the global revision id"""
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
-        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
-                   b'</inventory>\n')
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
+        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
+                   '</inventory>\n')
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
         self.assertRaises(AssertionError, repo._deserialise_inventory,
-                          b'test-rev-id', [inv_xml])
-        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
-        self.assertEqual(b'other-rev-id', inv.root.revision)
+            'test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
-        repo = self.make_repository(
-            '.', format=controldir.format_registry.get('knit')())
+        repo = self.make_repository('.',
+                format=controldir.format_registry.get('knit')())
         self.assertFalse(repo._format.supports_external_lookups)
 
 
@@ -382,7 +373,7 @@
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
         return (isinstance(repo_source, DummyRepository) and
-                isinstance(repo_target, DummyRepository))
+            isinstance(repo_target, DummyRepository))
 
 
 class TestInterRepository(TestCaseWithTransport):
@@ -427,17 +418,13 @@
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = (
-            repo._format.supports_tree_reference)
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
         dummy_a._format.rich_root_data = repo._format.rich_root_data
-        dummy_a._format.supports_full_versioned_files = (
-            repo._format.supports_full_versioned_files)
+        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
         dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = (
-            repo._format.supports_tree_reference)
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
         dummy_b._format.rich_root_data = repo._format.rich_root_data
-        dummy_b._format.supports_full_versioned_files = (
-            repo._format.supports_full_versioned_files)
+        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -477,10 +464,10 @@
         target_format = TestRepositoryFormat2()
         repository.format_registry.register(source_format)
         self.addCleanup(repository.format_registry.remove,
-                        source_format)
+            source_format)
         repository.format_registry.register(target_format)
         self.addCleanup(repository.format_registry.remove,
-                        target_format)
+            target_format)
         t = self.get_transport()
         t.mkdir('repository')
         repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
@@ -513,23 +500,30 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit1()
         tree = self.make_branch_and_tree('.', format)
-        tree.commit("Dull commit", rev_id=b"dull")
-        revision_tree = tree.branch.repository.revision_tree(b'dull')
-        with revision_tree.lock_read():
-            self.assertRaises(
-                errors.NoSuchFile, revision_tree.get_file_lines, u'')
+        tree.commit("Dull commit", rev_id="dull")
+        revision_tree = tree.branch.repository.revision_tree('dull')
+        revision_tree.lock_read()
+        try:
+            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
+                u'', revision_tree.get_root_id())
+        finally:
+            revision_tree.unlock()
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         upgrade.Convert('.', format)
         tree = workingtree.WorkingTree.open('.')
-        revision_tree = tree.branch.repository.revision_tree(b'dull')
-        with revision_tree.lock_read():
-            revision_tree.get_file_lines(u'')
+        revision_tree = tree.branch.repository.revision_tree('dull')
+        revision_tree.lock_read()
+        try:
+            revision_tree.get_file_lines(u'', revision_tree.get_root_id())
+        finally:
+            revision_tree.unlock()
         tree.commit("Another dull commit", rev_id=b'dull2')
-        revision_tree = tree.branch.repository.revision_tree(b'dull2')
+        revision_tree = tree.branch.repository.revision_tree('dull2')
         revision_tree.lock_read()
         self.addCleanup(revision_tree.unlock)
-        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))
+        self.assertEqual('dull',
+                revision_tree.get_file_revision(u'', revision_tree.get_root_id()))
 
     def test_supports_external_lookups(self):
         format = bzrdir.BzrDirMetaFormat1()
@@ -561,54 +555,54 @@
         builder.build_snapshot(None, [
             ('add', ('', b'root-id', 'directory', '')),
             ('add', ('file', b'file-id', 'file', b'content\n'))],
-            revision_id=b'1')
-        builder.build_snapshot([b'1'], [
-            ('modify', ('file', b'content-2\n'))],
-            revision_id=b'2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [(b'file-id', b'1',), (b'file-id', b'2',)])
-        file_1_details = details[(b'file-id', b'1')]
-        file_2_details = details[(b'file-id', b'2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', b'content\n'))],
-            revision_id=b'1')
-        builder.build_snapshot([b'1'], [
-            ('modify', ('file', b'content-2\n'))],
-            revision_id=b'2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [(b'file-id', b'1',), (b'file-id', b'2',)])
-        file_1_details = details[(b'file-id', b'1')]
-        file_2_details = details[(b'file-id', b'2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id='1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', 'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', 'content\n'))],
             revision_id=b'1')
         builder.build_snapshot([b'1'], [
             ('modify', ('file', b'content-2\n'))],
@@ -644,8 +638,8 @@
         inv.parent_id_basename_to_file_id._ensure_root()
         inv.id_to_entry._ensure_root()
         self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
-        self.assertEqual(
-            65536, inv.parent_id_basename_to_file_id._root_node.maximum_size)
+        self.assertEqual(65536,
+            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
     def test_autopack_unchanged_chk_nodes(self):
         # at 20 unchanged commits, chk pages are packed that are split into
@@ -696,7 +690,7 @@
 
     def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
         source_builder = self.make_branch_builder('source',
-                                                  format='2a')
+                            format='2a')
         # We have to build a fairly large tree, so that we are sure the chk
         # pages will have split into multiple pages.
         entries = [('add', ('', b'a-root-id', 'directory', None))]
@@ -704,7 +698,7 @@
             for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                 fname = i + j
                 fid = fname.encode('utf-8') + b'-id'
-                content = b'content for %s\n' % (fname.encode('utf-8'),)
+                content = 'content for %s\n' % (fname,)
                 entries.append(('add', (fname, fid, 'file', content)))
         source_builder.start_series()
         source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
@@ -726,32 +720,32 @@
         # On a regular pass, getting the inventories and chk pages for rev-2
         # would only get the newly created chk pages
         search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
-                                        {b'rev-2'})
-        simple_chk_records = set()
+                                    {b'rev-2'})
+        simple_chk_records = []
         for vf_name, substream in source.get_stream(search):
             if vf_name == 'chk_bytes':
                 for record in substream:
-                    simple_chk_records.add(record.key)
+                    simple_chk_records.append(record.key)
             else:
                 for _ in substream:
                     continue
         # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
-                         set(simple_chk_records))
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
         # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
-        missing = [('inventories', b'rev-2')]
-        full_chk_records = set()
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
         for vf_name, substream in source.get_stream_for_missing_keys(missing):
             if vf_name == 'inventories':
                 for record in substream:
-                    self.assertEqual((b'rev-2',), record.key)
+                    self.assertEqual(('rev-2',), record.key)
             elif vf_name == 'chk_bytes':
                 for record in substream:
-                    full_chk_records.add(record.key)
+                    full_chk_records.append(record.key)
             else:
                 self.fail('Should not be getting a stream of %s' % (vf_name,))
         # We have 257 records now. This is because we have 1 root page, and 256
@@ -774,29 +768,25 @@
         source = self.make_repository('source', format='pack-0.92')
         target = self.make_repository('target', format='pack-0.92')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_rich_root_pack(self):
         source = self.make_repository('source', format='rich-root-pack')
         target = self.make_repository('target', format='rich-root-pack')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19(self):
         source = self.make_repository('source', format='1.9')
         target = self.make_repository('target', format='1.9')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19_rich_root(self):
         source = self.make_repository('source', format='1.9-rich-root')
         target = self.make_repository('target', format='1.9-rich-root')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_remote_exact_pack_19(self):
         trans = self.make_smart_server('target')
@@ -805,8 +795,7 @@
         target = self.make_repository('target', format='1.9')
         target = repository.Repository.open(trans.base)
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(
-            stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_stream_source_to_non_exact(self):
         source = self.make_repository('source', format='pack-0.92')
@@ -843,60 +832,57 @@
         super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
         self.builder = self.make_branch_builder('source')
         self.builder.start_series()
-        self.builder.build_snapshot(
-            None,
-            [('add', ('', b'tree-root', 'directory', None))],
-            revision_id=b'initial')
+        self.builder.build_snapshot(None,
+            [('add', ('', 'tree-root', 'directory', None))],
+            revision_id='initial')
         self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)
 
     def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(
-            sorted(expected_result),
+        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
     def test_simple(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
-        rev_set = [b'revid2']
-        self.assertParentIds([b'revid1'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id='revid1')
+        self.builder.build_snapshot(['revid1'], [], revision_id='revid2')
+        rev_set = ['revid2']
+        self.assertParentIds(['revid1'], rev_set)
 
     def test_not_first_parent(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
-        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
-        rev_set = [b'revid3', b'revid2']
-        self.assertParentIds([b'revid1'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id='revid1')
+        self.builder.build_snapshot(['revid1'], [], revision_id='revid2')
+        self.builder.build_snapshot(['revid2'], [], revision_id='revid3')
+        rev_set = ['revid3', 'revid2']
+        self.assertParentIds(['revid1'], rev_set)
 
     def test_not_null(self):
-        rev_set = [b'initial']
+        rev_set = ['initial']
         self.assertParentIds([], rev_set)
 
     def test_not_null_set(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot(None, [], revision_id='revid1')
         rev_set = [_mod_revision.NULL_REVISION]
         self.assertParentIds([], rev_set)
 
     def test_ghost(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        rev_set = [b'ghost', b'revid1']
-        self.assertParentIds([b'initial'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id='revid1')
+        rev_set = ['ghost', 'revid1']
+        self.assertParentIds(['initial'], rev_set)
 
     def test_ghost_parent(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot(
-            [b'revid1', b'ghost'], [], revision_id=b'revid2')
-        rev_set = [b'revid2', b'revid1']
-        self.assertParentIds([b'ghost', b'initial'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id='revid1')
+        self.builder.build_snapshot(['revid1', 'ghost'], [], revision_id='revid2')
+        rev_set = ['revid2', 'revid1']
+        self.assertParentIds(['ghost', 'initial'], rev_set)
 
     def test_righthand_parent(self):
-        self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
-        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
-        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
-                                    revision_id=b'revid3')
-        rev_set = [b'revid3', b'revid2a']
-        self.assertParentIds([b'revid1', b'revid2b'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id='revid1')
+        self.builder.build_snapshot(['revid1'], [], revision_id='revid2a')
+        self.builder.build_snapshot(['revid1'], [], revision_id='revid2b')
+        self.builder.build_snapshot(['revid2a', 'revid2b'], [],
+                                    revision_id='revid3')
+        rev_set = ['revid3', 'revid2a']
+        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
 
 class TestWithBrokenRepo(TestCaseWithTransport):
@@ -914,43 +900,42 @@
             repo.start_write_group()
             cleanups.append(repo.commit_write_group)
             # make rev1a: A well-formed revision, containing 'file1'
-            inv = inventory.Inventory(revision_id=b'rev1a')
-            inv.root.revision = b'rev1a'
-            self.add_file(repo, inv, 'file1', b'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
-            repo.add_inventory(b'rev1a', inv, [])
-            revision = _mod_revision.Revision(
-                b'rev1a',
+            inv = inventory.Inventory(revision_id='rev1a')
+            inv.root.revision = 'rev1a'
+            self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
+            repo.add_inventory('rev1a', inv, [])
+            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
-            repo.add_revision(b'rev1a', revision, inv)
+            repo.add_revision('rev1a', revision, inv)
 
             # make rev1b, which has no Revision, but has an Inventory, and
             # file1
-            inv = inventory.Inventory(revision_id=b'rev1b')
-            inv.root.revision = b'rev1b'
-            self.add_file(repo, inv, 'file1', b'rev1b', [])
-            repo.add_inventory(b'rev1b', inv, [])
+            inv = inventory.Inventory(revision_id='rev1b')
+            inv.root.revision = 'rev1b'
+            self.add_file(repo, inv, 'file1', 'rev1b', [])
+            repo.add_inventory('rev1b', inv, [])
 
             # make rev2, with file1 and file2
             # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
             # mentioned by 'rev1a', making it an unreferenced ancestor
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
-            self.add_file(repo, inv, 'file2', b'rev2', [])
-            self.add_revision(repo, b'rev2', inv, [b'rev1a'])
+            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
+            self.add_file(repo, inv, 'file2', 'rev2', [])
+            self.add_revision(repo, 'rev2', inv, ['rev1a'])
 
             # make ghost revision rev1c
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', b'rev1c', [])
+            self.add_file(repo, inv, 'file2', 'rev1c', [])
 
             # make rev3 with file2
             # file2 refers to 'rev1c', which is a ghost in this repository, so
             # file2 cannot have rev1c as its ancestor.
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
-            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
+            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
+            self.add_revision(repo, 'rev3', inv, ['rev1c'])
             return repo
         finally:
             for cleanup in reversed(cleanups):
@@ -961,23 +946,20 @@
         inv.root.revision = revision_id
         repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
-        revision = _mod_revision.Revision(
-            revision_id,
+        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
         repo.add_revision(revision_id, revision, inv)
 
     def add_file(self, repo, inv, filename, revision, parents):
-        file_id = filename.encode('utf-8') + b'-id'
-        content = [b'line\n']
-        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
+        file_id = filename + '-id'
+        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
         entry.revision = revision
-        entry.text_sha1 = osutils.sha_strings(content)
         entry.text_size = 0
         inv.add(entry)
         text_key = (file_id, revision)
         parent_keys = [(file_id, parent) for parent in parents]
-        repo.texts.add_lines(text_key, parent_keys, content)
+        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
 
     def test_insert_from_broken_repo(self):
         """Inserting a data stream from a broken repository won't silently
@@ -994,8 +976,8 @@
         empty_repo.lock_read()
         self.addCleanup(empty_repo.unlock)
         text = next(empty_repo.texts.get_record_stream(
-            [(b'file2-id', b'rev3')], 'topological', True))
-        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))
+            [('file2-id', 'rev3')], 'topological', True))
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1029,11 +1011,11 @@
     def test__clear_obsolete_packs(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
         res = packs._clear_obsolete_packs()
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual([], obsolete_pack_trans.list_dir('.'))
@@ -1041,11 +1023,11 @@
     def test__clear_obsolete_packs_preserve(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
         res = packs._clear_obsolete_packs(preserve={'a-pack'})
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
@@ -1079,7 +1061,7 @@
     def test_repr(self):
         packs = self.get_packs()
         self.assertContainsRe(repr(packs),
-                              'RepositoryPackCollection(.*Repository(.*))')
+            'RepositoryPackCollection(.*Repository(.*))')
 
     def test__obsolete_packs(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1100,8 +1082,8 @@
                         sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-                         sorted({osutils.splitext(n)[0] for n in
-                                 packs._index_transport.list_dir('.')}))
+            sorted({osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')}))
 
     def test__obsolete_packs_missing_directory(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1117,8 +1099,8 @@
                         sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-                         sorted({osutils.splitext(n)[0] for n in
-                                 packs._index_transport.list_dir('.')}))
+            sorted({osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')}))
 
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
@@ -1132,23 +1114,23 @@
     def test_pack_distribution_one_to_nine(self):
         packs = self.get_packs()
         self.assertEqual([1],
-                         packs.pack_distribution(1))
+            packs.pack_distribution(1))
         self.assertEqual([1, 1],
-                         packs.pack_distribution(2))
+            packs.pack_distribution(2))
         self.assertEqual([1, 1, 1],
-                         packs.pack_distribution(3))
+            packs.pack_distribution(3))
         self.assertEqual([1, 1, 1, 1],
-                         packs.pack_distribution(4))
+            packs.pack_distribution(4))
         self.assertEqual([1, 1, 1, 1, 1],
-                         packs.pack_distribution(5))
+            packs.pack_distribution(5))
         self.assertEqual([1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(6))
+            packs.pack_distribution(6))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(7))
+            packs.pack_distribution(7))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(8))
+            packs.pack_distribution(8))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
-                         packs.pack_distribution(9))
+            packs.pack_distribution(9))
 
     def test_pack_distribution_stable_at_boundaries(self):
         """When there are multi-rev packs the counts are stable."""
@@ -1185,7 +1167,7 @@
     def test_plan_pack_operations_2010_combines_smallest_two(self):
         packs = self.get_packs()
         existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
-                          (1, "single1")]
+            (1, "single1")]
         # rev count - 2010 -> 2x1000 + 1x10 (3)
         pack_operations = packs.plan_autopack_combinations(
             existing_packs, [1000, 1000, 10])
@@ -1258,10 +1240,8 @@
         inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
         txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
         sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
-        self.assertEqual(
-            pack_repo.ExistingPack(
-                packs._pack_transport, name, rev_index, inv_index, txt_index,
-                sig_index), pack_1)
+        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
+            name, rev_index, inv_index, txt_index, sig_index), pack_1)
         # and the same instance should be returned on successive calls.
         self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
@@ -1279,7 +1259,7 @@
         self.assertTrue(packs.reload_pack_names())
         self.assertEqual(new_names, packs.names())
         # And the repository can access the new revision
-        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
+        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
         self.assertFalse(packs.reload_pack_names())
 
     def test_reload_pack_names_added_and_removed(self):
@@ -1292,7 +1272,7 @@
         self.assertEqual(names, packs.names())
         self.assertTrue(packs.reload_pack_names())
         self.assertEqual(new_names, packs.names())
-        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
+        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
     def test_reload_pack_names_preserves_pending(self):
@@ -1306,7 +1286,7 @@
         r.start_write_group()
         self.addCleanup(r.abort_write_group)
         r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
-            (b'text', b'rev'), (), None, b'content\n')])
+            ('text', 'rev'), (), None, 'content\n')])
         new_pack = packs._new_pack
         self.assertTrue(new_pack.data_inserted())
         new_pack.finish()
@@ -1316,23 +1296,23 @@
         packs._remove_pack_from_memory(removed_pack)
         names = packs.names()
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
+        new_names = {x[0][0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0] for x in deleted_nodes]))
+                         sorted([x[0][0] for x in deleted_nodes]))
         packs.reload_pack_names()
         reloaded_names = packs.names()
         self.assertEqual(orig_at_load, packs._packs_at_load)
         self.assertEqual(names, reloaded_names)
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
+        new_names = {x[0][0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0] for x in deleted_nodes]))
+                         sorted([x[0][0] for x in deleted_nodes]))
 
     def test_autopack_obsoletes_new_pack(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1340,10 +1320,10 @@
         packs.pack_distribution = lambda x: [10]
         r.start_write_group()
         r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
-            (b'bogus-rev',), (), None, b'bogus-content\n')])
+            ('bogus-rev',), (), None, 'bogus-content\n')])
         # This should trigger an autopack, which will combine everything into a
         # single pack file.
-        r.commit_write_group()
+        new_names = r.commit_write_group()
         names = packs.names()
         self.assertEqual(1, len(names))
         self.assertEqual([names[0] + '.pack'],
@@ -1355,7 +1335,6 @@
         # full-pack via the other repo which will cause us to re-evaluate and
         # decide we don't need to do anything
         orig_execute = packs._execute_pack_operations
-
         def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
@@ -1474,7 +1453,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
-        self.addCleanup(pack.abort)  # Make sure the write stream gets closed
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1494,18 +1473,18 @@
         builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', None)),
-            ('add', ('f', b'f-id', 'file', b'content\n'))],
-            revision_id=b'A')
-        builder.build_snapshot([b'A'],
-                               [('modify', ('f', b'new-content\n'))],
-                               revision_id=b'B')
-        builder.build_snapshot([b'B'],
-                               [('modify', ('f', b'third-content\n'))],
-                               revision_id=b'C')
-        builder.build_snapshot([b'C'],
-                               [('modify', ('f', b'fourth-content\n'))],
-                               revision_id=b'D')
+            ('add', ('', 'root-id', 'directory', None)),
+            ('add', ('f', 'f-id', 'file', 'content\n'))],
+            revision_id='A')
+        builder.build_snapshot(['A'],
+            [('modify', ('f', 'new-content\n'))],
+            revision_id='B')
+        builder.build_snapshot(['B'],
+            [('modify', ('f', 'third-content\n'))],
+            revision_id='C')
+        builder.build_snapshot(['C'],
+            [('modify', ('f', 'fourth-content\n'))],
+            revision_id='D')
         b = builder.get_branch()
         b.lock_read()
         builder.finish_series()
@@ -1515,14 +1494,14 @@
         # ['D', 'C', 'B', 'A']
         packs = b.repository._pack_collection.packs
         packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
-                                          packs, 'testing',
-                                          revision_ids=[b'B', b'C'])
+                                  packs, 'testing',
+                                  revision_ids=['B', 'C'])
         # Now, when we are copying the B & C revisions, their pack files should
         # be moved to the front of the stack
         # The new ordering moves B & C to the front of the .packs attribute,
         # and leaves the others in the original order.
         new_packs = [packs[1], packs[2], packs[0], packs[3]]
-        packer.pack()
+        new_pack = packer.pack()
         self.assertEqual(new_packs, packer.packs)
 
 
@@ -1535,9 +1514,9 @@
 
     def test_open_pack_will_optimise(self):
         packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
-                                                    [], '.test')
+                                            [], '.test')
         new_pack = packer.open_pack()
-        self.addCleanup(new_pack.abort)  # ensure cleanup
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
@@ -1551,15 +1530,15 @@
         builder = self.make_branch_builder('source')
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', b'root-id', 'directory', None)),
-            ('add', ('file', b'file-id', 'file', b'content\n')),
-            ], revision_id=b'A')
-        builder.build_snapshot([b'A'], [
-            ('add', ('dir', b'dir-id', 'directory', None))],
-            revision_id=b'B')
-        builder.build_snapshot([b'B'], [
-            ('modify', ('file', b'new content\n'))],
-            revision_id=b'C')
+            ('add', ('', 'root-id', 'directory', None)),
+            ('add', ('file', 'file-id', 'file', 'content\n')),
+            ], revision_id='A')
+        builder.build_snapshot(['A'], [
+            ('add', ('dir', 'dir-id', 'directory', None))],
+            revision_id='B')
+        builder.build_snapshot(['B'], [
+            ('modify', ('file', 'new content\n'))],
+            revision_id='C')
         builder.finish_series()
         return builder.get_branch()
 
@@ -1575,13 +1554,11 @@
                   pack_name_with_rev_C_content)
         """
         b_source = self.make_abc_branch()
-        b_base = b_source.controldir.sprout(
-            'base', revision_id=b'A').open_branch()
-        b_stacked = b_base.controldir.sprout(
-            'stacked', stacked=True).open_branch()
+        b_base = b_source.controldir.sprout('base', revision_id='A').open_branch()
+        b_stacked = b_base.controldir.sprout('stacked', stacked=True).open_branch()
         b_stacked.lock_write()
         self.addCleanup(b_stacked.unlock)
-        b_stacked.fetch(b_source, b'B')
+        b_stacked.fetch(b_source, 'B')
         # Now re-open the stacked repo directly (no fallbacks) so that we can
         # fill in the A rev.
         repo_not_stacked = b_stacked.controldir.open_repository()
@@ -1589,29 +1566,29 @@
         self.addCleanup(repo_not_stacked.unlock)
         # Now we should have a pack file with A's inventory, but not its
         # Revision
-        self.assertEqual([(b'A',), (b'B',)],
+        self.assertEqual([('A',), ('B',)],
                          sorted(repo_not_stacked.inventories.keys()))
-        self.assertEqual([(b'B',)],
+        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
         stacked_pack_names = repo_not_stacked._pack_collection.names()
         # We have a couple names here, figure out which has A's inventory
         for name in stacked_pack_names:
             pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
             keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
-            if (b'A',) in keys:
+            if ('A',) in keys:
                 inv_a_pack_name = name
                 break
         else:
             self.fail('Could not find pack containing A\'s inventory')
-        repo_not_stacked.fetch(b_source.repository, b'A')
-        self.assertEqual([(b'A',), (b'B',)],
+        repo_not_stacked.fetch(b_source.repository, 'A')
+        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
         new_pack_names = set(repo_not_stacked._pack_collection.names())
         rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
         self.assertEqual(1, len(rev_a_pack_names))
         rev_a_pack_name = list(rev_a_pack_names)[0]
         # Now fetch 'C', so we have a couple pack files to join
-        repo_not_stacked.fetch(b_source.repository, b'C')
+        repo_not_stacked.fetch(b_source.repository, 'C')
         rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
         rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
         self.assertEqual(1, len(rev_c_pack_names))
@@ -1629,7 +1606,7 @@
         a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
         c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
         packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
-                                                [a_pack, c_pack], '.test-pack')
+                    [a_pack, c_pack], '.test-pack')
         # This would raise ValueError in bug #437003, but should not raise an
         # error once fixed.
         packer.pack()
@@ -1642,11 +1619,11 @@
         inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
         repo._pack_collection._remove_pack_from_memory(inv_a_pack)
         packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
-                                                repo._pack_collection.all_packs(), '.test-pack')
+            repo._pack_collection.all_packs(), '.test-pack')
         e = self.assertRaises(ValueError, packer.pack)
         packer.new_pack.abort()
         self.assertContainsRe(str(e),
-                              r"We are missing inventories for revisions: .*'A'")
+            r"We are missing inventories for revisions: .*'A'")
 
 
 class TestCrossFormatPacks(TestCaseWithTransport):
@@ -1687,7 +1664,7 @@
         source_tree = self.make_branch_and_tree('src', format=src_fmt)
         source_tree.lock_write()
         self.addCleanup(source_tree.unlock)
-        source_tree.commit('foo')
+        tip = source_tree.commit('foo')
         target = self.make_repository('target', format=target_fmt)
         target.lock_write()
         self.addCleanup(target.unlock)
@@ -1756,4 +1733,4 @@
         repo.lock_write()
         repo._format.features[b"makes-cheese-sandwich"] = b"required"
         self.assertRaises(bzrdir.MissingFeature,
-                          repo._format.check_support_status, False)
+            repo._format.check_support_status, False)