/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2018-02-18 15:21:06 UTC
  • mto: This revision was merged to the branch mainline in revision 6928.
  • Revision ID: jelmer@jelmer.uk-20180218152106-m8bmfurzlspweyu4
Yet more bees.
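
This revision is one step in converting Breezy's test suite to byte-string identifiers: revision ids, file ids and search keys that were plain str literals under Python 2 become explicit b'...' literals (the "bees" of the commit message, presumably), since Breezy keeps these identifiers as byte strings under Python 3. A rough sketch of the pattern follows; it is not part of this revision, it only reuses the test helpers that appear in the diff, and TestByteIds and the ids in it are invented for the example:

from breezy.tests import TestCaseWithTransport


class TestByteIds(TestCaseWithTransport):
    # Hypothetical example, not part of this commit: file ids and
    # revision ids are passed as bytes literals, mirroring the diff below.

    def test_commit_with_byte_ids(self):
        tree = self.make_branch_and_tree('tree')
        self.build_tree(['tree/foo'])
        tree.add(['foo'], [b'foo-id'])              # file id as bytes
        tree.commit('first post', rev_id=b'rev-1')  # revision id as bytes
        self.assertEqual(b'rev-1', tree.last_revision())

The hunks below apply the same substitution throughout test_repository.py.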

@@ -253,6 +253,6 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Nasty-IdC:'], ['file'])
         tree.put_file_bytes_non_atomic('foo', '')
-        tree.commit('1st post', rev_id='foo')
+        tree.commit('1st post', rev_id=b'foo')
         self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
             '\nfoo fulltext 0 81  :')
@@ -521,7 +521,7 @@
             revision_tree.get_file_lines(u'', revision_tree.get_root_id())
         finally:
             revision_tree.unlock()
-        tree.commit("Another dull commit", rev_id='dull2')
+        tree.commit("Another dull commit", rev_id=b'dull2')
         revision_tree = tree.branch.repository.revision_tree('dull2')
         revision_tree.lock_read()
         self.addCleanup(revision_tree.unlock)
@@ -541,7 +541,7 @@
         mt = self.make_branch_and_memory_tree('test', format='2a')
         mt.lock_write()
         self.addCleanup(mt.unlock)
-        mt.add([''], ['root-id'])
+        mt.add([''], [b'root-id'])
         mt.commit('first')
         index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
         self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
@@ -556,70 +556,70 @@
         builder = self.make_branch_builder('source', format='2a')
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', 'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot(['1'], [
+            ('modify', (b'file-id', 'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', '1',), (b'file-id', '2',)])
+        file_1_details = details[(b'file-id', '1')]
+        file_2_details = details[(b'file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', 'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', (b'file-id', 'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', '1',), (b'file-id', '2',)])
+        file_1_details = details[(b'file-id', '1')]
+        file_2_details = details[(b'file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', 'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', (b'file-id', 'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', '1',), (b'file-id', '2',)])
+        file_1_details = details[(b'file-id', '1')]
+        file_2_details = details[(b'file-id', '2')]
         # The index, and what to read off disk, should be the same for both
         # versions of the file.
         self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
@@ -631,7 +631,7 @@
     def test_inventories_use_chk_map_with_parent_base_dict(self):
         tree = self.make_branch_and_memory_tree('repo', format="2a")
         tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
+        tree.add([''], [b'TREE_ROOT'])
         revid = tree.commit("foo")
         tree.unlock()
         tree.lock_read()
@@ -652,6 +652,6 @@
         tree = self.make_branch_and_memory_tree('tree', format='2a')
         tree.lock_write()
         self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree.add([''], [b'TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
@@ -660,7 +660,7 @@
         tree = self.make_branch_and_memory_tree('tree', format='2a')
         tree.lock_write()
         self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree.add([''], [b'TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -696,22 +696,22 @@
                             format='2a')
         # We have to build a fairly large tree, so that we are sure the chk
         # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        entries = [('add', ('', b'a-root-id', 'directory', None))]
         for i in 'abcdefghijklmnopqrstuvwxyz123456789':
             for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                 fname = i + j
-                fid = fname + '-id'
+                fid = fname.encode('utf-8') + b'-id'
                 content = 'content for %s\n' % (fname,)
                 entries.append(('add', (fname, fid, 'file', content)))
         source_builder.start_series()
-        source_builder.build_snapshot(None, entries, revision_id='rev-1')
+        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
         # Now change a few of them, so we get a few new pages for the second
         # revision
-        source_builder.build_snapshot(['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ], revision_id='rev-2')
+        source_builder.build_snapshot([b'rev-1'], [
+            ('modify', (b'aa-id', 'new content for aa-id\n')),
+            ('modify', (b'cc-id', 'new content for cc-id\n')),
+            ('modify', (b'zz-id', 'new content for zz-id\n')),
+            ], revision_id=b'rev-2')
         source_builder.finish_series()
         source_branch = source_builder.get_branch()
         source_branch.lock_read()
@@ -723,7 +723,7 @@
         # On a regular pass, getting the inventories and chk pages for rev-2
         # would only get the newly created chk pages
-        search = vf_search.SearchResult({'rev-2'}, {'rev-1'}, 1,
-                                    {'rev-2'})
+        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
+                                    {b'rev-2'})
         simple_chk_records = []
         for vf_name, substream in source.get_stream(search):
             if vf_name == 'chk_bytes':