/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/test_repository.py

Merge test-run support.
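
To run the test module this diff touches, Breezy's built-in test runner can
select it by name (a hedged example; flag spelling may vary across versions):

    brz selftest breezy.tests.test_repository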

--- breezy/tests/test_repository.py (old)
+++ breezy/tests/test_repository.py (new)
@@ -27,7 +27,6 @@
 import breezy
 from breezy.errors import (
     UnknownFormatError,
-    UnsupportedFormatError,
     )
 from breezy import (
     tests,
@@ -73,6 +72,7 @@
         private_default = old_default().repository_format.__class__
         old_format = repository.format_registry.get_default()
         self.assertTrue(isinstance(old_format, private_default))
+
         def make_sample_bzrdir():
             my_bzrdir = bzrdir.BzrDirMetaFormat1()
             my_bzrdir.repository_format = SampleRepositoryFormat()
@@ -137,11 +137,12 @@
         # create a branch with a few known format objects.
         # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
+
         def check_format(format, url):
             dir = format._matchingcontroldir.initialize(url)
             format.initialize(dir)
-            t = transport.get_transport_from_path(url)
-            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
+            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
+                dir)
             self.assertIsInstance(found_format, format.__class__)
         check_format(repository.format_registry.get_default(), "bar")
 
@@ -157,8 +158,8 @@
                 b"Sample .bzr repository format."),
             SampleRepositoryFormat)
         self.assertRaises(AssertionError,
-            SampleRepositoryFormat.from_string,
-                b"Different .bzr repository format.")
+                          SampleRepositoryFormat.from_string,
+                          b"Different .bzr repository format.")
 
     def test_find_format_unknown_format(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -170,13 +171,15 @@
     def test_find_format_with_features(self):
         tree = self.make_branch_and_tree('.', format='2a')
         tree.branch.repository.update_feature_flags({b"name": b"necessity"})
-        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(tree.controldir)
-        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
+        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
+            tree.controldir)
+        self.assertIsInstance(
+            found_format, bzrrepository.RepositoryFormatMetaDir)
         self.assertEqual(found_format.features.get(b"name"), b"necessity")
-        self.assertRaises(bzrdir.MissingFeature, found_format.check_support_status,
-            True)
-        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
-            b"name")
+        self.assertRaises(
+            bzrdir.MissingFeature, found_format.check_support_status, True)
+        self.addCleanup(
+            bzrrepository.RepositoryFormatMetaDir.unregister_feature, b"name")
         bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
         found_format.check_support_status(True)
 
@@ -190,9 +193,11 @@
     def test_register_unregister_format(self):
         format = SampleRepositoryFormat()
         self.registry.register(format)
-        self.assertEqual(format, self.registry.get(b"Sample .bzr repository format."))
+        self.assertEqual(format, self.registry.get(
+            b"Sample .bzr repository format."))
         self.registry.remove(format)
-        self.assertRaises(KeyError, self.registry.get, b"Sample .bzr repository format.")
+        self.assertRaises(KeyError, self.registry.get,
+                          b"Sample .bzr repository format.")
 
     def test_get_all(self):
         format = SampleRepositoryFormat()
@@ -209,7 +214,7 @@
     def test_register_extra_lazy(self):
         self.assertEqual([], self.registry._get_all())
         self.registry.register_extra_lazy("breezy.tests.test_repository",
-            "SampleExtraRepositoryFormat")
+                                          "SampleExtraRepositoryFormat")
         formats = self.registry._get_all()
         self.assertEqual(1, len(formats))
         self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
@@ -219,14 +224,14 @@
 
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
@@ -250,17 +255,17 @@
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
         # Check per-file knits.
-        branch = control.create_branch()
+        control.create_branch()
         tree = control.create_workingtree()
         tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
         tree.put_file_bytes_non_atomic('foo', b'')
         tree.commit('1st post', rev_id=b'foo')
         self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
-            '\nfoo fulltext 0 81  :')
+                           b'\nfoo fulltext 0 81  :')
 
-    def assertHasKnit(self, t, knit_name, extra_content=''):
+    def assertHasKnit(self, t, knit_name, extra_content=b''):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
+        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
                              t.get(knit_name + '.kndx').read())
 
     def check_knits(self, t):
@@ -271,7 +276,7 @@
 
     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
         # lock: is a directory
@@ -280,17 +285,19 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
-                             t.get('format').read())
+        with t.get('format') as f:
+            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
+                                 f.read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff('', t.get('shared-storage').read())
+        self.assertEqualDiff(b'', t.get('shared-storage').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
 
     def test_shared_no_tree_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        repo = knitrepo.RepositoryFormatKnit1().initialize(
+            control, shared=True)
         repo.set_make_working_trees(False)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
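
The hunk above replaces bare t.get('format').read() calls with a with block,
so the file-like object returned by the transport is closed deterministically,
and the expected contents become bytes. A minimal sketch of the pattern
outside the test harness (the '.' transport is a hypothetical stand-in):

    from breezy import transport

    t = transport.get_transport('.')   # any transport will do
    with t.get('format') as f:         # file-like object, closed on exit
        data = f.read()                # bytes on Python 3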
@@ -300,12 +307,13 @@
         # empty weaves directory
         # a 'shared-storage' marker file.
         t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
-                             t.get('format').read())
+        with t.get('format') as f:
+            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
+                                 f.read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
-        self.assertEqualDiff('', t.get('shared-storage').read())
-        self.assertEqualDiff('', t.get('no-working-trees').read())
+        self.assertEqualDiff(b'', t.get('shared-storage').read())
+        self.assertEqualDiff(b'', t.get('no-working-trees').read())
         repo.set_make_working_trees(True)
         self.assertFalse(t.has('no-working-trees'))
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
@@ -318,29 +326,29 @@
         the whole inventory. So we grab the one from the expected text. Which
         is valid when the api is not being abused.
         """
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
-        inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
-        self.assertEqual('test-rev-id', inv.root.revision)
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
+        inv_xml = b'<inventory format="5">\n</inventory>\n'
+        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
+        self.assertEqual(b'test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
         """If it is set, then we re-use the global revision id"""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
-        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
-                   '</inventory>\n')
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
+        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
+                   b'</inventory>\n')
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
         self.assertRaises(AssertionError, repo._deserialise_inventory,
-            'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
-        self.assertEqual('other-rev-id', inv.root.revision)
+                          b'test-rev-id', [inv_xml])
+        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
+        self.assertEqual(b'other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertFalse(repo._format.supports_external_lookups)
 
 
@@ -374,7 +382,7 @@
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
         return (isinstance(repo_source, DummyRepository) and
-            isinstance(repo_target, DummyRepository))
+                isinstance(repo_target, DummyRepository))
 
 
 class TestInterRepository(TestCaseWithTransport):
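
Two migrations overlap in the deserialise hunk above: literals that hold
revision ids or serialized XML gain a b prefix (they are bytes on Python 3),
and _deserialise_inventory now takes the XML as a list of byte chunks instead
of a single string. The calling convention, as the updated test exercises it
(repo setup elided):

    inv_xml = b'<inventory format="5">\n</inventory>\n'
    inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
    assert inv.root.revision == b'test-rev-id'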
@@ -419,13 +427,17 @@
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.supports_tree_reference = (
+            repo._format.supports_tree_reference)
         dummy_a._format.rich_root_data = repo._format.rich_root_data
-        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
+        dummy_a._format.supports_full_versioned_files = (
+            repo._format.supports_full_versioned_files)
         dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.supports_tree_reference = (
+            repo._format.supports_tree_reference)
         dummy_b._format.rich_root_data = repo._format.rich_root_data
-        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
+        dummy_b._format.supports_full_versioned_files = (
+            repo._format.supports_full_versioned_files)
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -465,10 +477,10 @@
         target_format = TestRepositoryFormat2()
         repository.format_registry.register(source_format)
         self.addCleanup(repository.format_registry.remove,
-            source_format)
+                        source_format)
         repository.format_registry.register(target_format)
         self.addCleanup(repository.format_registry.remove,
-            target_format)
+                        target_format)
         t = self.get_transport()
         t.mkdir('repository')
         repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
@@ -501,30 +513,23 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit1()
         tree = self.make_branch_and_tree('.', format)
-        tree.commit("Dull commit", rev_id="dull")
+        tree.commit("Dull commit", rev_id=b"dull")
         revision_tree = tree.branch.repository.revision_tree(b'dull')
-        revision_tree.lock_read()
-        try:
-            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
-                u'', revision_tree.get_root_id())
-        finally:
-            revision_tree.unlock()
+        with revision_tree.lock_read():
+            self.assertRaises(
+                errors.NoSuchFile, revision_tree.get_file_lines, u'')
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         upgrade.Convert('.', format)
         tree = workingtree.WorkingTree.open('.')
         revision_tree = tree.branch.repository.revision_tree(b'dull')
-        revision_tree.lock_read()
-        try:
-            revision_tree.get_file_lines(u'', revision_tree.get_root_id())
-        finally:
-            revision_tree.unlock()
+        with revision_tree.lock_read():
+            revision_tree.get_file_lines(u'')
         tree.commit("Another dull commit", rev_id=b'dull2')
         revision_tree = tree.branch.repository.revision_tree(b'dull2')
         revision_tree.lock_read()
         self.addCleanup(revision_tree.unlock)
-        self.assertEqual('dull',
-                revision_tree.get_file_revision(u'', revision_tree.get_root_id()))
+        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))
 
     def test_supports_external_lookups(self):
         format = bzrdir.BzrDirMetaFormat1()
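
The try/finally unlock dance above becomes a with statement: lock_read()
returns an object usable as a context manager, so the tree is unlocked even
when the assertion raises. The hunk also drops the file-id argument from
get_file_lines, leaving the path-only form. A minimal sketch (any lockable
revision tree):

    with revision_tree.lock_read():                  # unlocked on exit
        lines = revision_tree.get_file_lines(u'')    # path-only lookup
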
@@ -579,7 +584,7 @@
         builder.start_series()
         builder.build_snapshot(None, [
             ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', 'content\n'))],
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
             revision_id=b'1')
         builder.build_snapshot([b'1'], [
             ('modify', ('file', b'content-2\n'))],
@@ -603,7 +608,7 @@
         builder.start_series()
         builder.build_snapshot(None, [
             ('add', ('', b'root-id', 'directory', '')),
-            ('add', ('file', b'file-id', 'file', 'content\n'))],
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
             revision_id=b'1')
         builder.build_snapshot([b'1'], [
             ('modify', ('file', b'content-2\n'))],
@@ -639,8 +644,8 @@
         inv.parent_id_basename_to_file_id._ensure_root()
         inv.id_to_entry._ensure_root()
         self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
-        self.assertEqual(65536,
-            inv.parent_id_basename_to_file_id._root_node.maximum_size)
+        self.assertEqual(
+            65536, inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
     def test_autopack_unchanged_chk_nodes(self):
         # at 20 unchanged commits, chk pages are packed that are split into
@@ -691,7 +696,7 @@
 
     def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
         source_builder = self.make_branch_builder('source',
-                            format='2a')
+                                                  format='2a')
         # We have to build a fairly large tree, so that we are sure the chk
         # pages will have split into multiple pages.
         entries = [('add', ('', b'a-root-id', 'directory', None))]
@@ -699,7 +704,7 @@
             for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                 fname = i + j
                 fid = fname.encode('utf-8') + b'-id'
-                content = 'content for %s\n' % (fname,)
+                content = b'content for %s\n' % (fname.encode('utf-8'),)
                 entries.append(('add', (fname, fid, 'file', content)))
         source_builder.start_series()
         source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
@@ -721,32 +726,32 @@
         # On a regular pass, getting the inventories and chk pages for rev-2
         # would only get the newly created chk pages
         search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
-                                    {b'rev-2'})
-        simple_chk_records = []
+                                        {b'rev-2'})
+        simple_chk_records = set()
         for vf_name, substream in source.get_stream(search):
             if vf_name == 'chk_bytes':
                 for record in substream:
-                    simple_chk_records.append(record.key)
+                    simple_chk_records.add(record.key)
             else:
                 for _ in substream:
                     continue
         # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
+        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
+                         set(simple_chk_records))
         # Now, when we do a similar call using 'get_stream_for_missing_keys'
         # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
+        missing = [('inventories', b'rev-2')]
+        full_chk_records = set()
         for vf_name, substream in source.get_stream_for_missing_keys(missing):
             if vf_name == 'inventories':
                 for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
+                    self.assertEqual((b'rev-2',), record.key)
             elif vf_name == 'chk_bytes':
                 for record in substream:
-                    full_chk_records.append(record.key)
+                    full_chk_records.add(record.key)
             else:
                 self.fail('Should not be getting a stream of %s' % (vf_name,))
         # We have 257 records now. This is because we have 1 root page, and 256
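
Turning simple_chk_records and full_chk_records into sets makes the
assertions order-independent: records in a stream substream may arrive in any
order, so the test now compares membership rather than sequence. The same
idea in miniature (hypothetical keys and stream):

    seen = set()
    for record in substream:    # record order is not guaranteed
        seen.add(record.key)
    assert seen == {(b'key-1',), (b'key-2',)}
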
@@ -769,25 +774,29 @@
         source = self.make_repository('source', format='pack-0.92')
         target = self.make_repository('target', format='pack-0.92')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_rich_root_pack(self):
         source = self.make_repository('source', format='rich-root-pack')
         target = self.make_repository('target', format='rich-root-pack')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19(self):
         source = self.make_repository('source', format='1.9')
         target = self.make_repository('target', format='1.9')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_exact_pack_19_rich_root(self):
         source = self.make_repository('source', format='1.9-rich-root')
         target = self.make_repository('target', format='1.9-rich-root')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_source_to_remote_exact_pack_19(self):
         trans = self.make_smart_server('target')
@@ -796,7 +805,8 @@
         target = self.make_repository('target', format='1.9')
         target = repository.Repository.open(trans.base)
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)
 
     def test_stream_source_to_non_exact(self):
         source = self.make_repository('source', format='pack-0.92')
@@ -833,31 +843,33 @@
         super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
         self.builder = self.make_branch_builder('source')
         self.builder.start_series()
-        self.builder.build_snapshot(None,
-            [('add', ('', 'tree-root', 'directory', None))],
+        self.builder.build_snapshot(
+            None,
+            [('add', ('', b'tree-root', 'directory', None))],
             revision_id=b'initial')
         self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)
 
     def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(sorted(expected_result),
+        self.assertEqual(
+            sorted(expected_result),
             sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
     def test_simple(self):
         self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id=b'revid2')
-        rev_set = ['revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
+        rev_set = [b'revid2']
+        self.assertParentIds([b'revid1'], rev_set)
 
     def test_not_first_parent(self):
         self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id=b'revid2')
-        self.builder.build_snapshot(['revid2'], [], revision_id=b'revid3')
-        rev_set = ['revid3', 'revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
+        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
+        rev_set = [b'revid3', b'revid2']
+        self.assertParentIds([b'revid1'], rev_set)
 
     def test_not_null(self):
-        rev_set = ['initial']
+        rev_set = [b'initial']
         self.assertParentIds([], rev_set)
 
     def test_not_null_set(self):
@@ -867,23 +879,24 @@
 
     def test_ghost(self):
         self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        rev_set = ['ghost', 'revid1']
-        self.assertParentIds(['initial'], rev_set)
+        rev_set = [b'ghost', b'revid1']
+        self.assertParentIds([b'initial'], rev_set)
 
     def test_ghost_parent(self):
         self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot(['revid1', 'ghost'], [], revision_id=b'revid2')
-        rev_set = ['revid2', 'revid1']
-        self.assertParentIds(['ghost', 'initial'], rev_set)
+        self.builder.build_snapshot(
+            [b'revid1', b'ghost'], [], revision_id=b'revid2')
+        rev_set = [b'revid2', b'revid1']
+        self.assertParentIds([b'ghost', b'initial'], rev_set)
 
     def test_righthand_parent(self):
         self.builder.build_snapshot(None, [], revision_id=b'revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id=b'revid2a')
-        self.builder.build_snapshot(['revid1'], [], revision_id=b'revid2b')
-        self.builder.build_snapshot(['revid2a', 'revid2b'], [],
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
+        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
                                     revision_id=b'revid3')
-        rev_set = ['revid3', 'revid2a']
-        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+        rev_set = [b'revid3', b'revid2a']
+        self.assertParentIds([b'revid1', b'revid2b'], rev_set)
 
 
 class TestWithBrokenRepo(TestCaseWithTransport):
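
Throughout the class above, the branch builder's parent lists move to bytes:
build_snapshot takes parent revision ids as bytes, matching the bytes
revision_id keyword it already used. The new calling convention (builder
created as in setUp):

    builder.build_snapshot(None, [], revision_id=b'revid1')
    builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')  # parents are bytes too
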
@@ -902,41 +915,42 @@
             cleanups.append(repo.commit_write_group)
             # make rev1a: A well-formed revision, containing 'file1'
             inv = inventory.Inventory(revision_id=b'rev1a')
-            inv.root.revision = 'rev1a'
-            self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
-            repo.add_inventory('rev1a', inv, [])
-            revision = _mod_revision.Revision('rev1a',
+            inv.root.revision = b'rev1a'
+            self.add_file(repo, inv, 'file1', b'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
+            repo.add_inventory(b'rev1a', inv, [])
+            revision = _mod_revision.Revision(
+                b'rev1a',
                 committer='jrandom@example.com', timestamp=0,
                 inventory_sha1='', timezone=0, message='foo', parent_ids=[])
-            repo.add_revision('rev1a', revision, inv)
+            repo.add_revision(b'rev1a', revision, inv)
 
             # make rev1b, which has no Revision, but has an Inventory, and
             # file1
             inv = inventory.Inventory(revision_id=b'rev1b')
-            inv.root.revision = 'rev1b'
-            self.add_file(repo, inv, 'file1', 'rev1b', [])
-            repo.add_inventory('rev1b', inv, [])
+            inv.root.revision = b'rev1b'
+            self.add_file(repo, inv, 'file1', b'rev1b', [])
+            repo.add_inventory(b'rev1b', inv, [])
 
             # make rev2, with file1 and file2
             # file2 is sane
             # file1 has 'rev1b' as an ancestor, even though this is not
             # mentioned by 'rev1a', making it an unreferenced ancestor
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
-            self.add_file(repo, inv, 'file2', 'rev2', [])
-            self.add_revision(repo, 'rev2', inv, ['rev1a'])
+            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
+            self.add_file(repo, inv, 'file2', b'rev2', [])
+            self.add_revision(repo, b'rev2', inv, [b'rev1a'])
 
             # make ghost revision rev1c
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev1c', [])
+            self.add_file(repo, inv, 'file2', b'rev1c', [])
 
             # make rev3 with file2
             # file2 refers to 'rev1c', which is a ghost in this repository, so
             # file2 cannot have rev1c as its ancestor.
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
-            self.add_revision(repo, 'rev3', inv, ['rev1c'])
+            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
+            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
             return repo
         finally:
             for cleanup in reversed(cleanups):
947
        inv.root.revision = revision_id
962
948
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
963
949
        repo.add_inventory(revision_id, inv, parent_ids)
964
 
        revision = _mod_revision.Revision(
965
 
            revision_id,
 
950
        revision = _mod_revision.Revision(revision_id,
966
951
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
967
952
            timezone=0, message='foo', parent_ids=parent_ids)
968
953
        repo.add_revision(revision_id, revision, inv)
969
954
 
970
955
    def add_file(self, repo, inv, filename, revision, parents):
971
 
        file_id = filename.encode('utf-8') + b'-id'
972
 
        content = [b'line\n']
973
 
        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
 
956
        file_id = filename + '-id'
 
957
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
974
958
        entry.revision = revision
975
 
        entry.text_sha1 = osutils.sha_strings(content)
976
959
        entry.text_size = 0
977
960
        inv.add(entry)
978
961
        text_key = (file_id, revision)
979
962
        parent_keys = [(file_id, parent) for parent in parents]
980
 
        repo.texts.add_lines(text_key, parent_keys, content)
 
963
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
981
964
 
982
965
    def test_insert_from_broken_repo(self):
983
966
        """Inserting a data stream from a broken repository won't silently
@@ -977,8 +994,8 @@
         empty_repo.lock_read()
         self.addCleanup(empty_repo.unlock)
         text = next(empty_repo.texts.get_record_stream(
-            [('file2-id', 'rev3')], 'topological', True))
-        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
+            [(b'file2-id', b'rev3')], 'topological', True))
+        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1012,11 +1029,11 @@
     def test__clear_obsolete_packs(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
         res = packs._clear_obsolete_packs()
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual([], obsolete_pack_trans.list_dir('.'))
@@ -1024,11 +1041,11 @@
     def test__clear_obsolete_packs_preserve(self):
         packs = self.get_packs()
         obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
         res = packs._clear_obsolete_packs(preserve={'a-pack'})
         self.assertEqual(['a-pack', 'another-pack'], sorted(res))
         self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
@@ -1062,7 +1079,7 @@
     def test_repr(self):
         packs = self.get_packs()
         self.assertContainsRe(repr(packs),
-            'RepositoryPackCollection(.*Repository(.*))')
+                              'RepositoryPackCollection(.*Repository(.*))')
 
     def test__obsolete_packs(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1083,8 +1100,8 @@
                          sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-            sorted({osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')}))
+                         sorted({osutils.splitext(n)[0] for n in
+                                 packs._index_transport.list_dir('.')}))
 
     def test__obsolete_packs_missing_directory(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1100,8 +1117,8 @@
                          sorted(packs._pack_transport.list_dir('.')))
         # names[0] should not be present in the index anymore
         self.assertEqual(names[1:],
-            sorted({osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')}))
+                         sorted({osutils.splitext(n)[0] for n in
+                                 packs._index_transport.list_dir('.')}))
 
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
@@ -1115,23 +1132,23 @@
     def test_pack_distribution_one_to_nine(self):
         packs = self.get_packs()
         self.assertEqual([1],
-            packs.pack_distribution(1))
+                         packs.pack_distribution(1))
         self.assertEqual([1, 1],
-            packs.pack_distribution(2))
+                         packs.pack_distribution(2))
         self.assertEqual([1, 1, 1],
-            packs.pack_distribution(3))
+                         packs.pack_distribution(3))
         self.assertEqual([1, 1, 1, 1],
-            packs.pack_distribution(4))
+                         packs.pack_distribution(4))
         self.assertEqual([1, 1, 1, 1, 1],
-            packs.pack_distribution(5))
+                         packs.pack_distribution(5))
         self.assertEqual([1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(6))
+                         packs.pack_distribution(6))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(7))
+                         packs.pack_distribution(7))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(8))
+                         packs.pack_distribution(8))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(9))
+                         packs.pack_distribution(9))
 
     def test_pack_distribution_stable_at_boundaries(self):
         """When there are multi-rev packs the counts are stable."""
@@ -1168,7 +1185,7 @@
     def test_plan_pack_operations_2010_combines_smallest_two(self):
         packs = self.get_packs()
         existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
-            (1, "single1")]
+                          (1, "single1")]
         # rev count - 2010 -> 2x1000 + 1x10 (3)
         pack_operations = packs.plan_autopack_combinations(
             existing_packs, [1000, 1000, 10])
@@ -1241,8 +1258,10 @@
         inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
         txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
         sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
-        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
-            name, rev_index, inv_index, txt_index, sig_index), pack_1)
+        self.assertEqual(
+            pack_repo.ExistingPack(
+                packs._pack_transport, name, rev_index, inv_index, txt_index,
+                sig_index), pack_1)
         # and the same instance should be returned on successive calls.
         self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
@@ -1260,7 +1279,7 @@
         self.assertTrue(packs.reload_pack_names())
         self.assertEqual(new_names, packs.names())
         # And the repository can access the new revision
-        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
+        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
         self.assertFalse(packs.reload_pack_names())
 
     def test_reload_pack_names_added_and_removed(self):
@@ -1273,7 +1292,7 @@
         self.assertEqual(names, packs.names())
         self.assertTrue(packs.reload_pack_names())
         self.assertEqual(new_names, packs.names())
-        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
+        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
     def test_reload_pack_names_preserves_pending(self):
@@ -1287,7 +1306,7 @@
         r.start_write_group()
         self.addCleanup(r.abort_write_group)
         r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('text', 'rev'), (), None, 'content\n')])
+            (b'text', b'rev'), (), None, b'content\n')])
         new_pack = packs._new_pack
         self.assertTrue(new_pack.data_inserted())
         new_pack.finish()
@@ -1297,23 +1316,23 @@
         packs._remove_pack_from_memory(removed_pack)
         names = packs.names()
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0][0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        new_names = {x[0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
+                         sorted([x[0] for x in deleted_nodes]))
         packs.reload_pack_names()
         reloaded_names = packs.names()
         self.assertEqual(orig_at_load, packs._packs_at_load)
         self.assertEqual(names, reloaded_names)
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0][0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        new_names = {x[0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
+                         sorted([x[0] for x in deleted_nodes]))
 
     def test_autopack_obsoletes_new_pack(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1321,10 +1340,10 @@
         packs.pack_distribution = lambda x: [10]
         r.start_write_group()
         r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('bogus-rev',), (), None, 'bogus-content\n')])
+            (b'bogus-rev',), (), None, b'bogus-content\n')])
         # This should trigger an autopack, which will combine everything into a
         # single pack file.
-        new_names = r.commit_write_group()
+        r.commit_write_group()
         names = packs.names()
         self.assertEqual(1, len(names))
         self.assertEqual([names[0] + '.pack'],
@@ -1336,6 +1355,7 @@
         # full-pack via the other repo which will cause us to re-evaluate and
         # decide we don't need to do anything
         orig_execute = packs._execute_pack_operations
+
         def _munged_execute_pack_ops(*args, **kwargs):
             tree.branch.repository.pack()
             return orig_execute(*args, **kwargs)
@@ -1454,7 +1474,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
-        self.addCleanup(pack.abort) # Make sure the write stream gets closed
+        self.addCleanup(pack.abort)  # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1474,18 +1494,18 @@
         builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', None)),
-            ('add', ('f', 'f-id', 'file', 'content\n'))],
+            ('add', ('', b'root-id', 'directory', None)),
+            ('add', ('f', b'f-id', 'file', b'content\n'))],
             revision_id=b'A')
-        builder.build_snapshot(['A'],
-            [('modify', ('f', 'new-content\n'))],
-            revision_id=b'B')
-        builder.build_snapshot(['B'],
-            [('modify', ('f', 'third-content\n'))],
-            revision_id=b'C')
-        builder.build_snapshot(['C'],
-            [('modify', ('f', 'fourth-content\n'))],
-            revision_id=b'D')
+        builder.build_snapshot([b'A'],
+                               [('modify', ('f', b'new-content\n'))],
+                               revision_id=b'B')
+        builder.build_snapshot([b'B'],
+                               [('modify', ('f', b'third-content\n'))],
+                               revision_id=b'C')
+        builder.build_snapshot([b'C'],
+                               [('modify', ('f', b'fourth-content\n'))],
+                               revision_id=b'D')
         b = builder.get_branch()
         b.lock_read()
         builder.finish_series()
@@ -1495,14 +1515,14 @@
         # ['D', 'C', 'B', 'A']
         packs = b.repository._pack_collection.packs
         packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
-                                  packs, 'testing',
-                                  revision_ids=['B', 'C'])
+                                          packs, 'testing',
+                                          revision_ids=[b'B', b'C'])
         # Now, when we are copying the B & C revisions, their pack files should
         # be moved to the front of the stack
         # The new ordering moves B & C to the front of the .packs attribute,
         # and leaves the others in the original order.
         new_packs = [packs[1], packs[2], packs[0], packs[3]]
-        new_pack = packer.pack()
+        packer.pack()
         self.assertEqual(new_packs, packer.packs)
 
 
@@ -1515,9 +1535,9 @@
 
     def test_open_pack_will_optimise(self):
         packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
-                                            [], '.test')
+                                                    [], '.test')
         new_pack = packer.open_pack()
-        self.addCleanup(new_pack.abort) # ensure cleanup
+        self.addCleanup(new_pack.abort)  # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
@@ -1531,14 +1551,14 @@
         builder = self.make_branch_builder('source')
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', None)),
-            ('add', ('file', 'file-id', 'file', 'content\n')),
+            ('add', ('', b'root-id', 'directory', None)),
+            ('add', ('file', b'file-id', 'file', b'content\n')),
             ], revision_id=b'A')
-        builder.build_snapshot(['A'], [
-            ('add', ('dir', 'dir-id', 'directory', None))],
+        builder.build_snapshot([b'A'], [
+            ('add', ('dir', b'dir-id', 'directory', None))],
             revision_id=b'B')
-        builder.build_snapshot(['B'], [
-            ('modify', ('file', 'new content\n'))],
+        builder.build_snapshot([b'B'], [
+            ('modify', ('file', b'new content\n'))],
             revision_id=b'C')
         builder.finish_series()
         return builder.get_branch()
@@ -1555,11 +1575,13 @@
                   pack_name_with_rev_C_content)
         """
         b_source = self.make_abc_branch()
-        b_base = b_source.controldir.sprout('base', revision_id=b'A').open_branch()
-        b_stacked = b_base.controldir.sprout('stacked', stacked=True).open_branch()
+        b_base = b_source.controldir.sprout(
+            'base', revision_id=b'A').open_branch()
+        b_stacked = b_base.controldir.sprout(
+            'stacked', stacked=True).open_branch()
         b_stacked.lock_write()
         self.addCleanup(b_stacked.unlock)
-        b_stacked.fetch(b_source, 'B')
+        b_stacked.fetch(b_source, b'B')
         # Now re-open the stacked repo directly (no fallbacks) so that we can
         # fill in the A rev.
         repo_not_stacked = b_stacked.controldir.open_repository()
@@ -1567,29 +1589,29 @@
         self.addCleanup(repo_not_stacked.unlock)
         # Now we should have a pack file with A's inventory, but not its
         # Revision
-        self.assertEqual([('A',), ('B',)],
+        self.assertEqual([(b'A',), (b'B',)],
                          sorted(repo_not_stacked.inventories.keys()))
-        self.assertEqual([('B',)],
+        self.assertEqual([(b'B',)],
                          sorted(repo_not_stacked.revisions.keys()))
         stacked_pack_names = repo_not_stacked._pack_collection.names()
         # We have a couple names here, figure out which has A's inventory
         for name in stacked_pack_names:
             pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
             keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
-            if ('A',) in keys:
+            if (b'A',) in keys:
                 inv_a_pack_name = name
                 break
         else:
             self.fail('Could not find pack containing A\'s inventory')
-        repo_not_stacked.fetch(b_source.repository, 'A')
-        self.assertEqual([('A',), ('B',)],
+        repo_not_stacked.fetch(b_source.repository, b'A')
+        self.assertEqual([(b'A',), (b'B',)],
                          sorted(repo_not_stacked.revisions.keys()))
         new_pack_names = set(repo_not_stacked._pack_collection.names())
         rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
         self.assertEqual(1, len(rev_a_pack_names))
         rev_a_pack_name = list(rev_a_pack_names)[0]
         # Now fetch 'C', so we have a couple pack files to join
-        repo_not_stacked.fetch(b_source.repository, 'C')
+        repo_not_stacked.fetch(b_source.repository, b'C')
         rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
         rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
         self.assertEqual(1, len(rev_c_pack_names))
1607
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
1630
1608
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
1631
1609
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
1632
 
                                                [a_pack, c_pack], '.test-pack')
 
1610
                    [a_pack, c_pack], '.test-pack')
1633
1611
        # This would raise ValueError in bug #437003, but should not raise an
1634
1612
        # error once fixed.
1635
1613
        packer.pack()
@@ -1620,11 +1642,11 @@
         inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
         repo._pack_collection._remove_pack_from_memory(inv_a_pack)
         packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
-            repo._pack_collection.all_packs(), '.test-pack')
+                                                repo._pack_collection.all_packs(), '.test-pack')
         e = self.assertRaises(ValueError, packer.pack)
         packer.new_pack.abort()
         self.assertContainsRe(str(e),
-            r"We are missing inventories for revisions: .*'A'")
+                              r"We are missing inventories for revisions: .*'A'")
 
 
 class TestCrossFormatPacks(TestCaseWithTransport):
@@ -1665,7 +1687,7 @@
         source_tree = self.make_branch_and_tree('src', format=src_fmt)
         source_tree.lock_write()
         self.addCleanup(source_tree.unlock)
-        tip = source_tree.commit('foo')
+        source_tree.commit('foo')
         target = self.make_repository('target', format=target_fmt)
         target.lock_write()
         self.addCleanup(target.unlock)
@@ -1734,4 +1756,4 @@
         repo.lock_write()
         repo._format.features[b"makes-cheese-sandwich"] = b"required"
         self.assertRaises(bzrdir.MissingFeature,
-            repo._format.check_support_status, False)
+                          repo._format.check_support_status, False)