/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/per_versionedfile.py

  • Committer: Jelmer Vernooij
  • Date: 2017-12-21 16:44:19 UTC
  • mto: This revision was merged to the branch mainline in revision 6842.
  • Revision ID: jelmer@jelmer.uk-20171221164419-wn90kwu2uismpznf
Revert custom gmtime implementation without tests.
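
The hunks shown for this file are mechanical Python 3 porting changes, applied
throughout: absolute bzrlib imports become relative breezy imports (via the
..bzr and ..sixish compatibility modules), assertEquals becomes assertEqual,
iterator.next() becomes next(iterator), set([...]) becomes a set literal,
StringIO becomes BytesIO, and map() results are wrapped in list() where a list
is required. A minimal sketch of the same idioms (illustrative only, not code
from this branch):

    from io import BytesIO

    def first_record(stream):
        # Py3: next(stream) replaces the Py2-only stream.next()
        return next(stream)

    missing = {('left',)}                    # replaces set([('left',)])
    lines = list(map(str.upper, ['a\n']))    # map() is lazy on Py3
    buf = BytesIO(b'data')                   # bytes I/O replaces StringIO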

=== modified file 'breezy/tests/per_versionedfile.py'
--- breezy/tests/per_versionedfile.py
+++ breezy/tests/per_versionedfile.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2012, 2016 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -21,149 +21,59 @@
 # TODO: might be nice to create a versionedfile with some type of corruption
 # considered typical and check that it can be detected/corrected.
 
-from itertools import chain, izip
-from StringIO import StringIO
+from gzip import GzipFile
+import itertools
 
-from bzrlib import (
+from .. import (
     errors,
     graph as _mod_graph,
-    groupcompress,
-    knit as _mod_knit,
     osutils,
     progress,
+    transport,
     ui,
     )
-from bzrlib.errors import (
-                           RevisionNotPresent,
-                           RevisionAlreadyPresent,
-                           WeaveParentMismatch
-                           )
-from bzrlib.knit import (
+from ..bzr import (
+    groupcompress,
+    knit as _mod_knit,
+    )
+from ..errors import (
+    RevisionNotPresent,
+    RevisionAlreadyPresent,
+    )
+from ..bzr.knit import (
     cleanup_pack_knit,
     make_file_factory,
     make_pack_factory,
-    KnitAnnotateFactory,
-    KnitPlainFactory,
-    )
-from bzrlib.tests import (
+    )
+from ..sixish import (
+    BytesIO,
+    zip,
+    )
+from . import (
     TestCase,
     TestCaseWithMemoryTransport,
     TestNotApplicable,
     TestSkipped,
-    condition_isinstance,
-    split_suite_by_condition,
-    multiply_tests,
     )
-from bzrlib.tests.http_utils import TestCaseWithWebserver
-from bzrlib.trace import mutter
-from bzrlib.transport import get_transport
-from bzrlib.transport.memory import MemoryTransport
-from bzrlib.tsort import topo_sort
-from bzrlib.tuned_gzip import GzipFile
-import bzrlib.versionedfile as versionedfile
-from bzrlib.versionedfile import (
+from .http_utils import TestCaseWithWebserver
+from ..transport.memory import MemoryTransport
+from ..bzr import versionedfile as versionedfile
+from ..bzr.versionedfile import (
     ConstantMapper,
     HashEscapedPrefixMapper,
     PrefixMapper,
     VirtualVersionedFiles,
     make_versioned_files_factory,
     )
-from bzrlib.weave import WeaveFile
-from bzrlib.weavefile import read_weave, write_weave
-
-
-def load_tests(standard_tests, module, loader):
-    """Parameterize VersionedFiles tests for different implementations."""
-    to_adapt, result = split_suite_by_condition(
-        standard_tests, condition_isinstance(TestVersionedFiles))
-    # We want to be sure of behaviour for:
-    # weaves prefix layout (weave texts)
-    # individually named weaves (weave inventories)
-    # annotated knits - prefix|hash|hash-escape layout, we test the third only
-    #                   as it is the most complex mapper.
-    # individually named knits
-    # individual no-graph knits in packs (signatures)
-    # individual graph knits in packs (inventories)
-    # individual graph nocompression knits in packs (revisions)
-    # plain text knits in packs (texts)
-    len_one_scenarios = [
-        ('weave-named', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                ConstantMapper('inventory')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-knit', {
-            'cleanup':None,
-            'factory':make_file_factory(False, ConstantMapper('revisions')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-nograph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(False, False, 1),
-            'graph':False,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-graph-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': True,
-            }),
-        ('named-graph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, False, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('groupcompress-nograph', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(False, False, 1),
-            'graph': False,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    len_two_scenarios = [
-        ('weave-prefix', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                PrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('annotated-knit-escape', {
-            'cleanup':None,
-            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('plain-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 2),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': True,
-            }),
-        ('groupcompress', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(True, False, 1),
-            'graph': True,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    scenarios = len_one_scenarios + len_two_scenarios
-    return multiply_tests(to_adapt, scenarios, result)
+from ..bzr.weave import (
+    WeaveFile,
+    WeaveInvalidChecksum,
+    )
+from ..bzr.weavefile import write_weave
+from .scenarios import load_tests_apply_scenarios
+
+
+load_tests = load_tests_apply_scenarios
 
 
 def get_diamond_vf(f, trailing_eol=True, left_only=False):
@@ -280,9 +190,9 @@
             versions = f.versions()
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_text('r0'), 'a\nb\n')
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_text('r0'), 'a\nb\n')
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
             self.assertEqual(2, len(f))
             self.assertEqual(2, f.num_versions())
 
@@ -314,16 +224,16 @@
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
             self.assertTrue('r2' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
-            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
             self.assertEqual(3, f.num_versions())
             origins = f.annotate('r1')
-            self.assertEquals(origins[0][0], 'r0')
-            self.assertEquals(origins[1][0], 'r1')
+            self.assertEqual(origins[0][0], 'r0')
+            self.assertEqual(origins[1][0], 'r1')
             origins = f.annotate('r2')
-            self.assertEquals(origins[0][0], 'r1')
-            self.assertEquals(origins[1][0], 'r2')
+            self.assertEqual(origins[0][0], 'r1')
+            self.assertEqual(origins[1][0], 'r2')
 
         verify_file(f)
         f = self.reopen_file()
@@ -503,7 +413,7 @@
 
     def test_add_lines_with_matching_blocks_noeol_last_line(self):
         """Add a text with an unchanged last line with no eol should work."""
-        from bzrlib import multiparent
+        from breezy import multiparent
         # Hand verified sha1 of the text we're adding.
         sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
         # Create a mpdiff which adds a new line before the trailing line, and
@@ -524,7 +434,7 @@
         self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
 
     def test_make_mpdiffs(self):
-        from bzrlib import multiparent
+        from breezy import multiparent
         vf = self.get_file('foo')
         sha1s = self._setup_for_deltas(vf)
         new_vf = self.get_file('bar')
@@ -572,32 +482,32 @@
         next_parent = 'base'
         text_name = 'chain1-'
         text = ['line\n']
-        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
-                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
-                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
-                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
-                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
-                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
-                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
-                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
-                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
-                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
-                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
-                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
-                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
-                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
-                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
-                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
-                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
-                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
-                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
-                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
-                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
-                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
-                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
-                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
-                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
-                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
+        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
+                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
+                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
+                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
+                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
+                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
+                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
+                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
+                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
+                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
+                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
+                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
+                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
+                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
+                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
+                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
+                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
+                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
+                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
+                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
+                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
+                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
+                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
+                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
+                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
+                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                  }
         for depth in range(26):
             new_version = text_name + '%s' % depth
@@ -693,8 +603,8 @@
         f.add_lines('r0', [], ['a\n', 'b\n'])
         f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
         origins = f.annotate('r1')
-        self.assertEquals(origins[0][0], 'r1')
-        self.assertEquals(origins[1][0], 'r0')
+        self.assertEqual(origins[0][0], 'r1')
+        self.assertEqual(origins[1][0], 'r0')
 
         self.assertRaises(RevisionNotPresent,
             f.annotate, 'foo')
@@ -709,16 +619,16 @@
         w = self.get_file_corrupted_text()
 
         self.assertEqual('hello\n', w.get_text('v1'))
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
+        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.check)
 
         w = self.get_file_corrupted_checksum()
 
         self.assertEqual('hello\n', w.get_text('v1'))
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
+        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.check)
 
     def get_file_corrupted_text(self):
         """Return a versioned file with corrupt text but valid metadata."""
@@ -844,15 +754,15 @@
                                  ['base', 'a_ghost'],
                                  ['line\n', 'line_b\n', 'line_c\n'])
         origins = vf.annotate('references_ghost')
-        self.assertEquals(('base', 'line\n'), origins[0])
-        self.assertEquals(('base', 'line_b\n'), origins[1])
-        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
+        self.assertEqual(('base', 'line\n'), origins[0])
+        self.assertEqual(('base', 'line_b\n'), origins[1])
+        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
 
     def test_readonly_mode(self):
-        transport = get_transport(self.get_url('.'))
+        t = self.get_transport()
         factory = self.get_factory()
-        vf = factory('id', transport, 0777, create=True, access_mode='w')
-        vf = factory('id', transport, access_mode='r')
+        vf = factory('id', t, 0o777, create=True, access_mode='w')
+        vf = factory('id', t, access_mode='r')
         self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
         self.assertRaises(errors.ReadOnlyError,
                           vf.add_lines_with_ghosts,
@@ -880,12 +790,14 @@
 class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_file_corrupted_text(self):
-        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        w = WeaveFile('foo', self.get_transport(),
+                      create=True,
+                      get_scope=self.get_transaction)
         w.add_lines('v1', [], ['hello\n'])
         w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
 
@@ -919,14 +831,15 @@
         return w
 
     def reopen_file(self, name='foo', create=False):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=create,
+                         get_scope=self.get_transaction)
 
     def test_no_implicit_create(self):
         self.assertRaises(errors.NoSuchFile,
                           WeaveFile,
                           'foo',
-                          get_transport(self.get_url('.')),
+                          self.get_transport(),
                           get_scope=self.get_transaction)
 
     def get_factory(self):
@@ -936,7 +849,7 @@
 class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
 
     def setUp(self):
-        TestCaseWithMemoryTransport.setUp(self)
+        super(TestPlanMergeVersionedFile, self).setUp()
         mapper = PrefixMapper()
         factory = make_file_factory(True, mapper)
         self.vf1 = factory(self.get_transport('root-1'))
@@ -967,14 +880,14 @@
             self.plan_merge_vf.get_parent_map([('root', 'B')]))
         self.assertEqual({('root', 'D'):(('root', 'C'),)},
             self.plan_merge_vf.get_parent_map([('root', 'D')]))
-        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
+        self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
         self.assertEqual({},
             self.plan_merge_vf.get_parent_map([('root', 'F')]))
         self.assertEqual({
-                ('root', 'B'):(('root', 'A'),),
-                ('root', 'D'):(('root', 'C'),),
-                ('root', 'E:'):(('root', 'B'),('root', 'D')),
+                ('root', 'B'): (('root', 'A'),),
+                ('root', 'D'): (('root', 'C'),),
+                ('root', 'E:'): (('root', 'B'), ('root', 'D')),
                 },
             self.plan_merge_vf.get_parent_map(
                 [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))
@@ -982,8 +895,8 @@
     def test_get_record_stream(self):
         self.setup_abcde()
         def get_record(suffix):
-            return self.plan_merge_vf.get_record_stream(
-                [('root', suffix)], 'unordered', True).next()
+            return next(self.plan_merge_vf.get_record_stream(
+                [('root', suffix)], 'unordered', True))
         self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
         self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
         self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
@@ -999,13 +912,20 @@
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # try an empty file access
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
+
+    def test_readonly_http_works_with_feeling(self):
+        # we should be able to read from http with a versioned file.
+        vf = self.get_file()
         # now with feeling.
         vf.add_lines('1', [], ['a\n'])
         vf.add_lines('2', ['1'], ['b\n', 'a\n'])
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual(['1', '2'], vf.versions())
+        self.assertEqual(['1', '2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
 
@@ -1013,8 +933,9 @@
 class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
 
     def get_file(self):
-        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile('foo', self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_factory(self):
         return WeaveFile
@@ -1023,16 +944,15 @@
 class MergeCasesMixin(object):
 
     def doMerge(self, base, a, b, mp):
-        from cStringIO import StringIO
         from textwrap import dedent
 
         def addcrlf(x):
             return x + '\n'
 
         w = self.get_file()
-        w.add_lines('text0', [], map(addcrlf, base))
-        w.add_lines('text1', ['text0'], map(addcrlf, a))
-        w.add_lines('text2', ['text0'], map(addcrlf, b))
+        w.add_lines('text0', [], list(map(addcrlf, base)))
+        w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
+        w.add_lines('text2', ['text0'], list(map(addcrlf, b)))
 
         self.log_contents(w)
 
@@ -1043,12 +963,12 @@
                 self.log('%12s | %s' % (state, line[:-1]))
 
         self.log('merge:')
-        mt = StringIO()
+        mt = BytesIO()
         mt.writelines(w.weave_merge(p))
         mt.seek(0)
         self.log(mt.getvalue())
 
-        mp = map(addcrlf, mp)
+        mp = list(map(addcrlf, mp))
         self.assertEqual(mt.readlines(), mp)
 
 
@@ -1264,11 +1184,12 @@
 class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
+        return WeaveFile(name, self.get_transport(),
+                         create=True)
 
     def log_contents(self, w):
         self.log('weave is:')
-        tmpf = StringIO()
+        tmpf = BytesIO()
         write_weave(w, tmpf)
         self.log(tmpf.getvalue())
 
@@ -1309,11 +1230,11 @@
         """Grab the interested adapted texts for tests."""
         # origin is a fulltext
         entries = f.get_record_stream([('origin',)], 'unordered', False)
-        base = entries.next()
+        base = next(entries)
         ft_data = ft_adapter.get_bytes(base)
         # merged is both a delta and multiple parents.
         entries = f.get_record_stream([('merged',)], 'unordered', False)
-        merged = entries.next()
+        merged = next(entries)
         delta_data = delta_adapter.get_bytes(merged)
         return ft_data, delta_data
 
@@ -1329,11 +1250,11 @@
             'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
             'origin\n'
             'end origin\n',
-            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
             'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
             '1,2,3\nleft\nright\nmerged\nend merged\n',
-            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_deannotation(self):
         """Test converting annotated knits to unannotated knits."""
@@ -1347,11 +1268,11 @@
             'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
             'origin\n'
             'end origin\n',
-            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
             'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
             '2,2,2\nright\nmerged\nend merged\n',
-            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_annotated_to_fulltext_no_eol(self):
         """Test adapting annotated knits to full texts (for -> weaves)."""
@@ -1463,6 +1384,95 @@
 class TestVersionedFiles(TestCaseWithMemoryTransport):
     """Tests for the multiple-file variant of VersionedFile."""
 
+    # We want to be sure of behaviour for:
+    # weaves prefix layout (weave texts)
+    # individually named weaves (weave inventories)
+    # annotated knits - prefix|hash|hash-escape layout, we test the third only
+    #                   as it is the most complex mapper.
+    # individually named knits
+    # individual no-graph knits in packs (signatures)
+    # individual graph knits in packs (inventories)
+    # individual graph nocompression knits in packs (revisions)
+    # plain text knits in packs (texts)
+    len_one_scenarios = [
+        ('weave-named', {
+            'cleanup': None,
+            'factory': make_versioned_files_factory(WeaveFile,
+                ConstantMapper('inventory')),
+            'graph': True,
+            'key_length': 1,
+            'support_partial_insertion': False,
+            }),
+        ('named-knit', {
+            'cleanup': None,
+            'factory': make_file_factory(False, ConstantMapper('revisions')),
+            'graph': True,
+            'key_length': 1,
+            'support_partial_insertion': False,
+            }),
+        ('named-nograph-nodelta-knit-pack', {
+            'cleanup': cleanup_pack_knit,
+            'factory': make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length': 1,
+            'support_partial_insertion': False,
+            }),
+        ('named-graph-knit-pack', {
+            'cleanup': cleanup_pack_knit,
+            'factory': make_pack_factory(True, True, 1),
+            'graph': True,
+            'key_length': 1,
+            'support_partial_insertion': True,
+            }),
+        ('named-graph-nodelta-knit-pack', {
+            'cleanup': cleanup_pack_knit,
+            'factory': make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length': 1,
+            'support_partial_insertion': False,
+            }),
+        ('groupcompress-nograph', {
+            'cleanup': groupcompress.cleanup_pack_group,
+            'factory': groupcompress.make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length': 1,
+            'support_partial_insertion': False,
+            }),
+        ]
+    len_two_scenarios = [
+        ('weave-prefix', {
+            'cleanup': None,
+            'factory': make_versioned_files_factory(WeaveFile,
+                PrefixMapper()),
+            'graph': True,
+            'key_length': 2,
+            'support_partial_insertion': False,
+            }),
+        ('annotated-knit-escape', {
+            'cleanup': None,
+            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
+            'graph': True,
+            'key_length': 2,
+            'support_partial_insertion': False,
+            }),
+        ('plain-knit-pack', {
+            'cleanup': cleanup_pack_knit,
+            'factory': make_pack_factory(True, True, 2),
+            'graph': True,
+            'key_length': 2,
+            'support_partial_insertion': True,
+            }),
+        ('groupcompress', {
+            'cleanup': groupcompress.cleanup_pack_group,
+            'factory': groupcompress.make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length': 1,
+            'support_partial_insertion': False,
+            }),
+        ]
+
+    scenarios = len_one_scenarios + len_two_scenarios
+
     def get_versionedfiles(self, relpath='files'):
         transport = self.get_transport(relpath)
         if relpath != '.':
@@ -1479,6 +1489,18 @@
         else:
             return ('FileA',) + (suffix,)
 
+    def test_add_fallback_implies_without_fallbacks(self):
+        f = self.get_versionedfiles('files')
+        if getattr(f, 'add_fallback_versioned_files', None) is None:
+            raise TestNotApplicable("%s doesn't support fallbacks"
+                                    % (f.__class__.__name__,))
+        g = self.get_versionedfiles('fallback')
+        key_a = self.get_simple_key('a')
+        g.add_lines(key_a, [], ['\n'])
+        f.add_fallback_versioned_files(g)
+        self.assertTrue(key_a in f.get_parent_map([key_a]))
+        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
+
     def test_add_lines(self):
         f = self.get_versionedfiles()
         key0 = self.get_simple_key('r0')
@@ -1499,26 +1521,6 @@
         records.sort()
         self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
 
-    def test__add_text(self):
-        f = self.get_versionedfiles()
-        key0 = self.get_simple_key('r0')
-        key1 = self.get_simple_key('r1')
-        key2 = self.get_simple_key('r2')
-        keyf = self.get_simple_key('foo')
-        f._add_text(key0, [], 'a\nb\n')
-        if self.graph:
-            f._add_text(key1, [key0], 'b\nc\n')
-        else:
-            f._add_text(key1, [], 'b\nc\n')
-        keys = f.keys()
-        self.assertTrue(key0 in keys)
-        self.assertTrue(key1 in keys)
-        records = []
-        for record in f.get_record_stream([key0, key1], 'unordered', True):
-            records.append((record.key, record.get_bytes_as('fulltext')))
-        records.sort()
-        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
-
     def test_annotate(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
@@ -1606,8 +1608,8 @@
                 sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                          lines)
             else:
-                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
-                                         ''.join(lines))
+                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
+                                         lines)
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
@@ -1617,18 +1619,15 @@
                 vf.add_lines, new_key, [], lines,
                 nostore_sha=sha)
             self.assertRaises(errors.ExistingContent,
-                vf._add_text, new_key, [], ''.join(lines),
+                vf.add_lines, new_key, [], lines,
                 nostore_sha=sha)
             # and no new version should have been added.
-            record = vf.get_record_stream([new_key], 'unordered', True).next()
+            record = next(vf.get_record_stream([new_key], 'unordered', True))
             self.assertEqual('absent', record.storage_kind)
 
     def test_add_lines_nostoresha(self):
         self._add_content_nostoresha(add_lines=True)
 
-    def test__add_text_nostoresha(self):
-        self._add_content_nostoresha(add_lines=False)
-
     def test_add_lines_return(self):
         files = self.get_versionedfiles()
         # save code by using the stock data insertion helper.
@@ -1678,13 +1677,13 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
-            self.assertEqual(set([
+            self.assertEqual({
                 ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                 ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                 ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                 ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                 ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
-                ]),
+                },
                 files.keys())
         elif self.key_length == 2:
             self.assertEqual([
@@ -1700,7 +1699,7 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
-            self.assertEqual(set([
+            self.assertEqual({
                 ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                 ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                 ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
@@ -1711,7 +1710,7 @@
                 ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                 ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                 ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
-                ]),
+                },
                 files.keys())
 
     def test_empty_lines(self):
@@ -1838,10 +1837,10 @@
                 ('FileB', 'base'),
                 ]
             sort_order = {
-                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
-                ('FileA', 'base'):0,
-                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
-                ('FileB', 'base'):0,
+                ('FileA', 'merged'): 2, ('FileA', 'left'): 1, ('FileA', 'right'): 1,
+                ('FileA', 'base'): 0,
+                ('FileB', 'merged'): 2, ('FileB', 'left'): 1, ('FileB', 'right'): 1,
+                ('FileB', 'base'): 0,
                 }
         return keys, sort_order
 
@@ -1858,10 +1857,10 @@
                 ('FileB', 'base'),
                 ]
             sort_order = {
-                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
-                ('FileA', 'base'):2,
-                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
-                ('FileB', 'base'):5,
+                ('FileA', 'merged'): 0, ('FileA', 'left'): 1, ('FileA', 'right'): 1,
+                ('FileA', 'base'): 2,
+                ('FileB', 'merged'): 3, ('FileB', 'left'): 4, ('FileB', 'right'): 4,
+                ('FileB', 'base'): 5,
                 }
        return keys, sort_order
 
@@ -1985,7 +1984,7 @@
         key = self.get_simple_key('foo')
         files.add_lines(key, (), ['my text\n', 'content'])
         stream = files.get_record_stream([key], 'unordered', False)
-        record = stream.next()
+        record = next(stream)
         if record.storage_kind in ('chunked', 'fulltext'):
             # chunked and fulltext representations are for direct use not wire
             # serialisation: check they are able to be used directly. To send
@@ -2012,9 +2011,9 @@
         :param records: A list to collect the seen records.
         :return: A generator of the records in stream.
         """
-        # We make assertions during copying to catch things early for
-        # easier debugging.
-        for record, ref_record in izip(stream, expected):
+        # We make assertions during copying to catch things early for easier
+        # debugging. This must use the iterating zip() from the future.
+        for record, ref_record in zip(stream, expected):
             records.append(record)
             self.assertEqual(ref_record.key, record.key)
             self.assertEqual(ref_record.storage_kind, record.storage_kind)
@@ -2237,7 +2236,7 @@
                 (('r1',), self.get_parents((('r0',),))),
                 (('r2',), self.get_parents(())),
                 (('r3',), self.get_parents(())),
-                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
+                (('m',), self.get_parents((('r0',), ('r1',), ('r2',), ('r3',)))),
                 ]
         else:
            parent_details = [
@@ -2428,7 +2427,7 @@
         origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
         end_entries = source.get_record_stream(end_keys, 'topological', False)
         start_entries = source.get_record_stream(start_keys, 'topological', False)
-        entries = chain(origin_entries, end_entries, start_entries)
+        entries = itertools.chain(origin_entries, end_entries, start_entries)
         try:
             files.insert_record_stream(entries)
         except RevisionNotPresent:
@@ -2460,7 +2459,7 @@
         streams = []
         for key in reversed(keys):
             streams.append(source.get_record_stream([key], 'unordered', False))
-        deltas = chain(*streams[:-1])
+        deltas = itertools.chain.from_iterable(streams[:-1])
         files = self.get_versionedfiles()
         try:
             files.insert_record_stream(deltas)
@@ -2500,7 +2499,7 @@
                 list(files.get_missing_compression_parent_keys()))
             files.insert_record_stream(entries)
             missing_bases = files.get_missing_compression_parent_keys()
-            self.assertEqual(set([self.get_simple_key('left')]),
+            self.assertEqual({self.get_simple_key('left')},
                 set(missing_bases))
             self.assertEqual(set(keys), set(files.get_parent_map(keys)))
         else:
@@ -2524,7 +2523,7 @@
         files = self.get_versionedfiles()
         files.insert_record_stream(entries)
         missing_bases = files.get_missing_compression_parent_keys()
-        self.assertEqual(set([self.get_simple_key('left')]),
+        self.assertEqual({self.get_simple_key('left')},
            set(missing_bases))
         # 'merged' is inserted (although a commit of a write group involving
         # this versionedfiles would fail).
@@ -2615,7 +2614,7 @@
             lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
 
     def test_make_mpdiffs(self):
-        from bzrlib import multiparent
+        from breezy import multiparent
         files = self.get_versionedfiles('source')
         # add texts that should trip the knit maximum delta chain threshold
         # as well as doing parallel chains of data in knits.
@@ -2654,32 +2653,32 @@
         next_parent = self.get_simple_key('base')
         text_name = 'chain1-'
         text = ['line\n']
-        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
-                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
-                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
-                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
-                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
-                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
-                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
-                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
-                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
-                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
-                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
-                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
-                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
-                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
-                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
-                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
-                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
-                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
-                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
-                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
-                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
-                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
-                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
-                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
-                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
-                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
+        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
+                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
+                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
+                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
+                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
+                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
+                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
+                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
+                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
+                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
+                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
+                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
+                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
+                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
+                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
+                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
+                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
+                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
+                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
+                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
+                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
+                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
+                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
+                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
+                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
+                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                  }
         for depth in range(26):
             new_version = self.get_simple_key(text_name + '%s' % depth)
@@ -2717,7 +2716,7 @@
         else:
             key = ('foo', 'bar',)
         files.add_lines(key, (), [])
-        self.assertEqual(set([key]), set(files.keys()))
+        self.assertEqual({key}, set(files.keys()))
 
 
 class VirtualVersionedFilesTests(TestCase):
@@ -2731,7 +2730,7 @@
         return ret
 
     def setUp(self):
-        TestCase.setUp(self)
+        super(VirtualVersionedFilesTests, self).setUp()
        self._lines = {}
         self._parent_map = {}
         self.texts = VirtualVersionedFiles(self._get_parent_map,
@@ -2753,37 +2752,37 @@
                           [])
 
     def test_get_sha1s_nonexistent(self):
-        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))
+        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))
 
     def test_get_sha1s(self):
         self._lines["key"] = ["dataline1", "dataline2"]
-        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
+        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
                            self.texts.get_sha1s([("key",)]))
 
     def test_get_parent_map(self):
         self._parent_map = {"G": ("A", "B")}
-        self.assertEquals({("G",): (("A",),("B",))},
+        self.assertEqual({("G",): (("A",), ("B",))},
                           self.texts.get_parent_map([("G",), ("L",)]))
 
     def test_get_record_stream(self):
         self._lines["A"] = ["FOO", "BAR"]
         it = self.texts.get_record_stream([("A",)], "unordered", True)
-        record = it.next()
-        self.assertEquals("chunked", record.storage_kind)
-        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
-        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))
+        record = next(it)
+        self.assertEqual("chunked", record.storage_kind)
+        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
+        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
 
     def test_get_record_stream_absent(self):
         it = self.texts.get_record_stream([("A",)], "unordered", True)
-        record = it.next()
-        self.assertEquals("absent", record.storage_kind)
+        record = next(it)
+        self.assertEqual("absent", record.storage_kind)
 
     def test_iter_lines_added_or_present_in_keys(self):
         self._lines["A"] = ["FOO", "BAR"]
         self._lines["B"] = ["HEY"]
         self._lines["C"] = ["Alberta"]
         it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
-        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
+        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
             sorted(list(it)))
 
 
@@ -2792,11 +2791,12 @@
     def get_ordering_vf(self, key_priority):
         builder = self.make_branch_builder('test')
         builder.start_series()
-        builder.build_snapshot('A', None, [
-            ('add', ('', 'TREE_ROOT', 'directory', None))])
-        builder.build_snapshot('B', ['A'], [])
-        builder.build_snapshot('C', ['B'], [])
-        builder.build_snapshot('D', ['C'], [])
+        builder.build_snapshot(None, [
+            ('add', ('', 'TREE_ROOT', 'directory', None))],
+            revision_id='A')
+        builder.build_snapshot(['A'], [], revision_id='B')
+        builder.build_snapshot(['B'], [], revision_id='C')
+        builder.build_snapshot(['C'], [], revision_id='D')
         builder.finish_series()
         b = builder.get_branch()
         b.lock_read()
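
The largest structural change in this diff replaces the module-level
load_tests hook (built on split_suite_by_condition and multiply_tests) with
declarative class-level scenario lists applied by
breezy.tests.scenarios.load_tests_apply_scenarios. A rough sketch of that
pattern follows; the scenarios attribute and load_tests assignment mirror the
diff, while the TestAdder class and its operand attribute are invented for
illustration:

    from breezy.tests import TestCase
    from breezy.tests.scenarios import load_tests_apply_scenarios

    # unittest discovers this module-level hook and uses it to multiply
    # the tests by their scenarios.
    load_tests = load_tests_apply_scenarios

    class TestAdder(TestCase):
        # Each (name, dict) pair yields one copy of every test in the
        # class, with the dict entries bound as instance attributes.
        scenarios = [
            ('small', {'operand': 1}),
            ('large', {'operand': 1000}),
            ]

        def test_double(self):
            self.assertEqual(2 * self.operand, self.operand + self.operand)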