/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/per_versionedfile.py

  • Committer: Jelmer Vernooij
  • Date: 2017-07-10 23:47:19 UTC
  • mto: This revision was merged to the branch mainline in revision 6732.
  • Revision ID: jelmer@jelmer.uk-20170710234719-6gec6320uvchcslm
Move lazy regex error to breezy.lazy_regex.

--- breezy/tests/per_versionedfile.py
+++ breezy/tests/per_versionedfile.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2012, 2016 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -21,149 +21,56 @@
 # TODO: might be nice to create a versionedfile with some type of corruption
 # considered typical and check that it can be detected/corrected.
 
-from itertools import chain, izip
-from StringIO import StringIO
+from gzip import GzipFile
+import itertools
 
-from bzrlib import (
+from .. import (
     errors,
     graph as _mod_graph,
-    groupcompress,
-    knit as _mod_knit,
     osutils,
     progress,
+    transport,
     ui,
     )
-from bzrlib.errors import (
-                           RevisionNotPresent,
-                           RevisionAlreadyPresent,
-                           WeaveParentMismatch
-                           )
-from bzrlib.knit import (
+from ..bzr import (
+    groupcompress,
+    knit as _mod_knit,
+    )
+from ..errors import (
+    RevisionNotPresent,
+    RevisionAlreadyPresent,
+    )
+from ..bzr.knit import (
     cleanup_pack_knit,
     make_file_factory,
     make_pack_factory,
-    KnitAnnotateFactory,
-    KnitPlainFactory,
-    )
-from bzrlib.tests import (
+    )
+from ..sixish import (
+    BytesIO,
+    zip,
+    )
+from . import (
     TestCase,
     TestCaseWithMemoryTransport,
     TestNotApplicable,
     TestSkipped,
-    condition_isinstance,
-    split_suite_by_condition,
-    multiply_tests,
     )
-from bzrlib.tests.http_utils import TestCaseWithWebserver
-from bzrlib.trace import mutter
-from bzrlib.transport import get_transport
-from bzrlib.transport.memory import MemoryTransport
-from bzrlib.tsort import topo_sort
-from bzrlib.tuned_gzip import GzipFile
-import bzrlib.versionedfile as versionedfile
-from bzrlib.versionedfile import (
+from .http_utils import TestCaseWithWebserver
+from ..transport.memory import MemoryTransport
+from ..bzr import versionedfile as versionedfile
+from ..bzr.versionedfile import (
     ConstantMapper,
     HashEscapedPrefixMapper,
     PrefixMapper,
     VirtualVersionedFiles,
     make_versioned_files_factory,
     )
-from bzrlib.weave import WeaveFile
-from bzrlib.weavefile import read_weave, write_weave
-
-
-def load_tests(standard_tests, module, loader):
-    """Parameterize VersionedFiles tests for different implementations."""
-    to_adapt, result = split_suite_by_condition(
-        standard_tests, condition_isinstance(TestVersionedFiles))
-    # We want to be sure of behaviour for:
-    # weaves prefix layout (weave texts)
-    # individually named weaves (weave inventories)
-    # annotated knits - prefix|hash|hash-escape layout, we test the third only
-    #                   as it is the most complex mapper.
-    # individually named knits
-    # individual no-graph knits in packs (signatures)
-    # individual graph knits in packs (inventories)
-    # individual graph nocompression knits in packs (revisions)
-    # plain text knits in packs (texts)
-    len_one_scenarios = [
-        ('weave-named', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                ConstantMapper('inventory')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-knit', {
-            'cleanup':None,
-            'factory':make_file_factory(False, ConstantMapper('revisions')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-nograph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(False, False, 1),
-            'graph':False,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-graph-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': True,
-            }),
-        ('named-graph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, False, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('groupcompress-nograph', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(False, False, 1),
-            'graph': False,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    len_two_scenarios = [
-        ('weave-prefix', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                PrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('annotated-knit-escape', {
-            'cleanup':None,
-            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('plain-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 2),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': True,
-            }),
-        ('groupcompress', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(True, False, 1),
-            'graph': True,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    scenarios = len_one_scenarios + len_two_scenarios
-    return multiply_tests(to_adapt, scenarios, result)
+from ..bzr.weave import WeaveFile
+from ..bzr.weavefile import write_weave
+from .scenarios import load_tests_apply_scenarios
+
+
+load_tests = load_tests_apply_scenarios
 
 
 def get_diamond_vf(f, trailing_eol=True, left_only=False):
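
Porting note: this hunk replaces the hand-rolled load_tests hook (split_suite_by_condition plus multiply_tests) with breezy's declarative scenario loader; the scenario list itself reappears as a class attribute on TestVersionedFiles in a later hunk. A minimal sketch of the pattern, with illustrative class and scenario names that are not from this branch:

    from breezy.tests import TestCase
    from breezy.tests.scenarios import load_tests_apply_scenarios

    load_tests = load_tests_apply_scenarios


    class TestWithScenarios(TestCase):

        # Each (name, parameters) pair yields one copy of every test in
        # the class, with the parameters bound as instance attributes.
        scenarios = [
            ('len-one', {'key_length': 1}),
            ('len-two', {'key_length': 2}),
            ]

        def test_key_length_bound(self):
            self.assertIn(self.key_length, (1, 2))
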
@@ -280,9 +187,9 @@
             versions = f.versions()
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_text('r0'), 'a\nb\n')
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_text('r0'), 'a\nb\n')
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
             self.assertEqual(2, len(f))
             self.assertEqual(2, f.num_versions())
 
@@ -314,16 +221,16 @@
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
             self.assertTrue('r2' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
-            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
             self.assertEqual(3, f.num_versions())
             origins = f.annotate('r1')
-            self.assertEquals(origins[0][0], 'r0')
-            self.assertEquals(origins[1][0], 'r1')
+            self.assertEqual(origins[0][0], 'r0')
+            self.assertEqual(origins[1][0], 'r1')
             origins = f.annotate('r2')
-            self.assertEquals(origins[0][0], 'r1')
-            self.assertEquals(origins[1][0], 'r2')
+            self.assertEqual(origins[0][0], 'r1')
+            self.assertEqual(origins[1][0], 'r2')
 
         verify_file(f)
         f = self.reopen_file()
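
Porting note: the assertEquals renames throughout this file are not just style. assertEquals is a deprecated alias of assertEqual in unittest (flagged since Python 2.7/3.2), so the rename silences deprecation warnings. A minimal illustration:

    import unittest

    class Demo(unittest.TestCase):
        def test_alias(self):
            # assertEquals still works but emits a DeprecationWarning on
            # modern Pythons; assertEqual is the supported spelling.
            self.assertEqual(1 + 1, 2)
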
@@ -503,7 +410,7 @@
 
     def test_add_lines_with_matching_blocks_noeol_last_line(self):
         """Add a text with an unchanged last line with no eol should work."""
-        from bzrlib import multiparent
+        from breezy import multiparent
         # Hand verified sha1 of the text we're adding.
         sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
         # Create a mpdiff which adds a new line before the trailing line, and
@@ -524,7 +431,7 @@
         self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
 
     def test_make_mpdiffs(self):
-        from bzrlib import multiparent
+        from breezy import multiparent
         vf = self.get_file('foo')
         sha1s = self._setup_for_deltas(vf)
         new_vf = self.get_file('bar')
@@ -693,8 +600,8 @@
         f.add_lines('r0', [], ['a\n', 'b\n'])
         f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
         origins = f.annotate('r1')
-        self.assertEquals(origins[0][0], 'r1')
-        self.assertEquals(origins[1][0], 'r0')
+        self.assertEqual(origins[0][0], 'r1')
+        self.assertEqual(origins[1][0], 'r0')
 
         self.assertRaises(RevisionNotPresent,
             f.annotate, 'foo')
@@ -844,15 +751,15 @@
                                  ['base', 'a_ghost'],
                                  ['line\n', 'line_b\n', 'line_c\n'])
         origins = vf.annotate('references_ghost')
-        self.assertEquals(('base', 'line\n'), origins[0])
-        self.assertEquals(('base', 'line_b\n'), origins[1])
-        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
+        self.assertEqual(('base', 'line\n'), origins[0])
+        self.assertEqual(('base', 'line_b\n'), origins[1])
+        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
 
     def test_readonly_mode(self):
-        transport = get_transport(self.get_url('.'))
+        t = self.get_transport()
         factory = self.get_factory()
-        vf = factory('id', transport, 0777, create=True, access_mode='w')
-        vf = factory('id', transport, access_mode='r')
+        vf = factory('id', t, 0o777, create=True, access_mode='w')
+        vf = factory('id', t, access_mode='r')
         self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
         self.assertRaises(errors.ReadOnlyError,
                           vf.add_lines_with_ghosts,
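
Porting note: the permission bits change because Python 3 dropped the bare-leading-zero octal literal; 0o777 is accepted by Python 2.6+ and 3 alike, so it is the portable spelling:

    # 0777 is a SyntaxError on Python 3; the 0o prefix works everywhere.
    mode = 0o777
    assert mode == 511          # the same value the old literal denoted
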
@@ -880,12 +787,14 @@
 class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_file_corrupted_text(self):
-        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        w = WeaveFile('foo', self.get_transport(),
+                      create=True,
+                      get_scope=self.get_transaction)
         w.add_lines('v1', [], ['hello\n'])
         w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
 
@@ -919,14 +828,15 @@
         return w
 
     def reopen_file(self, name='foo', create=False):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=create,
+                         get_scope=self.get_transaction)
 
     def test_no_implicit_create(self):
         self.assertRaises(errors.NoSuchFile,
                           WeaveFile,
                           'foo',
-                          get_transport(self.get_url('.')),
+                          self.get_transport(),
                           get_scope=self.get_transaction)
 
     def get_factory(self):
@@ -936,7 +846,7 @@
 class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
 
     def setUp(self):
-        TestCaseWithMemoryTransport.setUp(self)
+        super(TestPlanMergeVersionedFile, self).setUp()
         mapper = PrefixMapper()
         factory = make_file_factory(True, mapper)
         self.vf1 = factory(self.get_transport('root-1'))
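
Porting note: calling setUp through super() instead of naming the base class keeps every class in the MRO involved, which matters once test mixins are stacked. A small sketch with illustrative class names:

    import unittest

    class Base(unittest.TestCase):
        def setUp(self):
            super(Base, self).setUp()
            self.resource = object()

    class Derived(Base):
        def setUp(self):
            # Runs Base.setUp (and anything else in the MRO) exactly once,
            # even under cooperative multiple inheritance.
            super(Derived, self).setUp()
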
@@ -982,8 +892,8 @@
     def test_get_record_stream(self):
         self.setup_abcde()
         def get_record(suffix):
-            return self.plan_merge_vf.get_record_stream(
-                [('root', suffix)], 'unordered', True).next()
+            return next(self.plan_merge_vf.get_record_stream(
+                [('root', suffix)], 'unordered', True))
         self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
         self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
         self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
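
Porting note: Python 3 renamed the iterator method from next() to __next__(), so the it.next() calls in this file go through the next() builtin, which dispatches correctly on both versions:

    it = iter(['a', 'b'])
    assert next(it) == 'a'      # portable across Python 2 and 3
    # it.next() would raise AttributeError on Python 3
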
@@ -999,13 +909,20 @@
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # try an empty file access
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
+
+    def test_readonly_http_works_with_feeling(self):
+        # we should be able to read from http with a versioned file.
+        vf = self.get_file()
         # now with feeling.
         vf.add_lines('1', [], ['a\n'])
         vf.add_lines('2', ['1'], ['b\n', 'a\n'])
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual(['1', '2'], vf.versions())
+        self.assertEqual(['1', '2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
 
@@ -1013,8 +930,9 @@
 class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
 
     def get_file(self):
-        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile('foo', self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_factory(self):
         return WeaveFile
@@ -1023,16 +941,15 @@
 class MergeCasesMixin(object):
 
     def doMerge(self, base, a, b, mp):
-        from cStringIO import StringIO
         from textwrap import dedent
 
         def addcrlf(x):
             return x + '\n'
 
         w = self.get_file()
-        w.add_lines('text0', [], map(addcrlf, base))
-        w.add_lines('text1', ['text0'], map(addcrlf, a))
-        w.add_lines('text2', ['text0'], map(addcrlf, b))
+        w.add_lines('text0', [], list(map(addcrlf, base)))
+        w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
+        w.add_lines('text2', ['text0'], list(map(addcrlf, b)))
 
         self.log_contents(w)
 
@@ -1043,12 +960,12 @@
                 self.log('%12s | %s' % (state, line[:-1]))
 
         self.log('merge:')
-        mt = StringIO()
+        mt = BytesIO()
         mt.writelines(w.weave_merge(p))
         mt.seek(0)
         self.log(mt.getvalue())
 
-        mp = map(addcrlf, mp)
+        mp = list(map(addcrlf, mp))
         self.assertEqual(mt.readlines(), mp)
 
 
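Porting note: two Python 3 behaviours drive this hunk. map() now returns a lazy iterator rather than a list, and weave contents are bytes, so the scratch buffer must be binary. A sketch of both, using io.BytesIO (assuming breezy.sixish.BytesIO is the io.BytesIO alias):

    from io import BytesIO

    def addcrlf(x):
        return x + '\n'

    # On Python 3 map() is lazy; wrap it in list() wherever a real list
    # (len(), indexing, multiple passes) is required.
    assert list(map(addcrlf, ['a', 'b'])) == ['a\n', 'b\n']

    # Merge output is bytes, so the buffer must be a BytesIO.
    mt = BytesIO()
    mt.writelines([b'a\n', b'b\n'])
    assert mt.getvalue() == b'a\nb\n'
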
@@ -1264,11 +1181,12 @@
 class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
+        return WeaveFile(name, self.get_transport(),
+                         create=True)
 
     def log_contents(self, w):
         self.log('weave is:')
-        tmpf = StringIO()
+        tmpf = BytesIO()
         write_weave(w, tmpf)
         self.log(tmpf.getvalue())
 
@@ -1309,11 +1227,11 @@
         """Grab the interested adapted texts for tests."""
         # origin is a fulltext
         entries = f.get_record_stream([('origin',)], 'unordered', False)
-        base = entries.next()
+        base = next(entries)
         ft_data = ft_adapter.get_bytes(base)
         # merged is both a delta and multiple parents.
         entries = f.get_record_stream([('merged',)], 'unordered', False)
-        merged = entries.next()
+        merged = next(entries)
         delta_data = delta_adapter.get_bytes(merged)
         return ft_data, delta_data
 
@@ -1329,11 +1247,11 @@
             'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
             'origin\n'
             'end origin\n',
-            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
             'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
             '1,2,3\nleft\nright\nmerged\nend merged\n',
-            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_deannotation(self):
         """Test converting annotated knits to unannotated knits."""
@@ -1347,11 +1265,11 @@
             'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
             'origin\n'
             'end origin\n',
-            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
             'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
             '2,2,2\nright\nmerged\nend merged\n',
-            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_annotated_to_fulltext_no_eol(self):
         """Test adapting annotated knits to full texts (for -> weaves)."""
@@ -1463,6 +1381,95 @@
 class TestVersionedFiles(TestCaseWithMemoryTransport):
     """Tests for the multiple-file variant of VersionedFile."""
 
+    # We want to be sure of behaviour for:
+    # weaves prefix layout (weave texts)
+    # individually named weaves (weave inventories)
+    # annotated knits - prefix|hash|hash-escape layout, we test the third only
+    #                   as it is the most complex mapper.
+    # individually named knits
+    # individual no-graph knits in packs (signatures)
+    # individual graph knits in packs (inventories)
+    # individual graph nocompression knits in packs (revisions)
+    # plain text knits in packs (texts)
+    len_one_scenarios = [
+        ('weave-named', {
+            'cleanup':None,
+            'factory':make_versioned_files_factory(WeaveFile,
+                ConstantMapper('inventory')),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-knit', {
+            'cleanup':None,
+            'factory':make_file_factory(False, ConstantMapper('revisions')),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-nograph-nodelta-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(False, False, 1),
+            'graph':False,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-graph-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, True, 1),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': True,
+            }),
+        ('named-graph-nodelta-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, False, 1),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('groupcompress-nograph', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
+        ]
+    len_two_scenarios = [
+        ('weave-prefix', {
+            'cleanup':None,
+            'factory':make_versioned_files_factory(WeaveFile,
+                PrefixMapper()),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': False,
+            }),
+        ('annotated-knit-escape', {
+            'cleanup':None,
+            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': False,
+            }),
+        ('plain-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, True, 2),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': True,
+            }),
+        ('groupcompress', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
+        ]
+
+    scenarios = len_one_scenarios + len_two_scenarios
+
     def get_versionedfiles(self, relpath='files'):
         transport = self.get_transport(relpath)
         if relpath != '.':
@@ -1479,6 +1486,18 @@
         else:
             return ('FileA',) + (suffix,)
 
+    def test_add_fallback_implies_without_fallbacks(self):
+        f = self.get_versionedfiles('files')
+        if getattr(f, 'add_fallback_versioned_files', None) is None:
+            raise TestNotApplicable("%s doesn't support fallbacks"
+                                    % (f.__class__.__name__,))
+        g = self.get_versionedfiles('fallback')
+        key_a = self.get_simple_key('a')
+        g.add_lines(key_a, [], ['\n'])
+        f.add_fallback_versioned_files(g)
+        self.assertTrue(key_a in f.get_parent_map([key_a]))
+        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
+
     def test_add_lines(self):
         f = self.get_versionedfiles()
         key0 = self.get_simple_key('r0')
@@ -1620,7 +1639,7 @@
                 vf._add_text, new_key, [], ''.join(lines),
                 nostore_sha=sha)
             # and no new version should have been added.
-            record = vf.get_record_stream([new_key], 'unordered', True).next()
+            record = next(vf.get_record_stream([new_key], 'unordered', True))
             self.assertEqual('absent', record.storage_kind)
 
     def test_add_lines_nostoresha(self):
@@ -1678,13 +1697,13 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
-            self.assertEqual(set([
+            self.assertEqual({
                 ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                 ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                 ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                 ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                 ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
-                ]),
+                },
                 files.keys())
         elif self.key_length == 2:
             self.assertEqual([
@@ -1700,7 +1719,7 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
-            self.assertEqual(set([
+            self.assertEqual({
                 ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                 ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                 ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
@@ -1711,7 +1730,7 @@
                 ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                 ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                 ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
-                ]),
+                },
                 files.keys())
 
     def test_empty_lines(self):
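
Porting note: the set([...]) to {...} rewrites in this and later hunks are equivalence-preserving; set literals have been available since Python 2.7 and skip building an intermediate list:

    # The two spellings construct the same set; the literal is just terser.
    assert {('FileA',), ('FileB',)} == set([('FileA',), ('FileB',)])
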
@@ -1985,7 +2004,7 @@
         key = self.get_simple_key('foo')
         files.add_lines(key, (), ['my text\n', 'content'])
         stream = files.get_record_stream([key], 'unordered', False)
-        record = stream.next()
+        record = next(stream)
         if record.storage_kind in ('chunked', 'fulltext'):
             # chunked and fulltext representations are for direct use not wire
             # serialisation: check they are able to be used directly. To send
@@ -2012,9 +2031,9 @@
         :param records: A list to collect the seen records.
         :return: A generator of the records in stream.
         """
-        # We make assertions during copying to catch things early for
-        # easier debugging.
-        for record, ref_record in izip(stream, expected):
+        # We make assertions during copying to catch things early for easier
+        # debugging. This must use the iterating zip() from the future.
+        for record, ref_record in zip(stream, expected):
             records.append(record)
             self.assertEqual(ref_record.key, record.key)
             self.assertEqual(ref_record.storage_kind, record.storage_kind)
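
Porting note: the izip import disappeared with the import hunk at the top of the file; sixish.zip fills its place (itertools.izip on Python 2, the builtin on Python 3, as the "iterating zip" comment suggests). Laziness matters because both arguments here are generators that should be paired record by record rather than drained into lists:

    def records():
        for key in ('a', 'b'):
            yield key

    # Pairs elements on demand; with a lazy zip neither generator is
    # exhausted ahead of the comparison loop.
    for record, ref_record in zip(records(), records()):
        assert record == ref_record
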
@@ -2428,7 +2447,7 @@
         origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
         end_entries = source.get_record_stream(end_keys, 'topological', False)
         start_entries = source.get_record_stream(start_keys, 'topological', False)
-        entries = chain(origin_entries, end_entries, start_entries)
+        entries = itertools.chain(origin_entries, end_entries, start_entries)
         try:
             files.insert_record_stream(entries)
         except RevisionNotPresent:
@@ -2460,7 +2479,7 @@
         streams = []
         for key in reversed(keys):
             streams.append(source.get_record_stream([key], 'unordered', False))
-        deltas = chain(*streams[:-1])
+        deltas = itertools.chain.from_iterable(streams[:-1])
         files = self.get_versionedfiles()
         try:
             files.insert_record_stream(deltas)
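
Porting note: chain(*streams[:-1]) and itertools.chain.from_iterable(streams[:-1]) yield the same records; the from_iterable form avoids unpacking the stream list into positional arguments and reads naturally now that itertools is imported as a module:

    import itertools

    streams = [iter([1, 2]), iter([3]), iter([4])]
    deltas = itertools.chain.from_iterable(streams[:-1])
    assert list(deltas) == [1, 2, 3]    # last stream deliberately excluded
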
@@ -2500,7 +2519,7 @@
                 list(files.get_missing_compression_parent_keys()))
             files.insert_record_stream(entries)
             missing_bases = files.get_missing_compression_parent_keys()
-            self.assertEqual(set([self.get_simple_key('left')]),
+            self.assertEqual({self.get_simple_key('left')},
                 set(missing_bases))
             self.assertEqual(set(keys), set(files.get_parent_map(keys)))
         else:
@@ -2524,7 +2543,7 @@
         files = self.get_versionedfiles()
         files.insert_record_stream(entries)
         missing_bases = files.get_missing_compression_parent_keys()
-        self.assertEqual(set([self.get_simple_key('left')]),
+        self.assertEqual({self.get_simple_key('left')},
             set(missing_bases))
         # 'merged' is inserted (although a commit of a write group involving
         # this versionedfiles would fail).
@@ -2615,7 +2634,7 @@
             lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
 
     def test_make_mpdiffs(self):
-        from bzrlib import multiparent
+        from breezy import multiparent
         files = self.get_versionedfiles('source')
         # add texts that should trip the knit maximum delta chain threshold
         # as well as doing parallel chains of data in knits.
@@ -2717,7 +2736,7 @@
         else:
             key = ('foo', 'bar',)
         files.add_lines(key, (), [])
-        self.assertEqual(set([key]), set(files.keys()))
+        self.assertEqual({key}, set(files.keys()))
 
 
 class VirtualVersionedFilesTests(TestCase):
@@ -2731,7 +2750,7 @@ class VirtualVersionedFilesTests(TestCase):
         return ret
 
     def setUp(self):
-        TestCase.setUp(self)
+        super(VirtualVersionedFilesTests, self).setUp()
         self._lines = {}
         self._parent_map = {}
         self.texts = VirtualVersionedFiles(self._get_parent_map,
@@ -2753,36 +2772,36 @@
                           [])
 
     def test_get_sha1s_nonexistent(self):
-        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))
+        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))
 
     def test_get_sha1s(self):
         self._lines["key"] = ["dataline1", "dataline2"]
-        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
+        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
                            self.texts.get_sha1s([("key",)]))
 
     def test_get_parent_map(self):
         self._parent_map = {"G": ("A", "B")}
-        self.assertEquals({("G",): (("A",),("B",))},
+        self.assertEqual({("G",): (("A",),("B",))},
                           self.texts.get_parent_map([("G",), ("L",)]))
 
     def test_get_record_stream(self):
         self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
-        record = it.next()
-        self.assertEquals("chunked", record.storage_kind)
-        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
-        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))
+        record = next(it)
+        self.assertEqual("chunked", record.storage_kind)
+        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
+        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
 
     def test_get_record_stream_absent(self):
         it = self.texts.get_record_stream([("A",)], "unordered", True)
-        record = it.next()
-        self.assertEquals("absent", record.storage_kind)
+        record = next(it)
+        self.assertEqual("absent", record.storage_kind)
 
     def test_iter_lines_added_or_present_in_keys(self):
         self._lines["A"] = ["FOO", "BAR"]
         self._lines["B"] = ["HEY"]
         self._lines["C"] = ["Alberta"]
         it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
-        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
+        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
             sorted(list(it)))
 