/brz/remove-bazaar

To get this branch, use:

    bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/per_versionedfile.py

  • Committer: Jelmer Vernooij
  • Date: 2017-10-26 01:03:20 UTC
  • mfrom: (6793.1.1 install)
  • Revision ID: jelmer@jelmer.uk-20171026010320-k9ktedb4il8r6ug6
Merge lp:~jelmer/brz/install.
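
This merge moves breezy/tests/per_versionedfile.py off the old bzrlib module layout and toward Python 3 compatibility: imports now come from breezy's relative packages, izip and StringIO give way to the zip and BytesIO re-exported by breezy.sixish, iterator .next() calls become the next() builtin, assertEquals becomes assertEqual, octal and set literals are modernised, and the hand-rolled load_tests hook is replaced by the scenarios framework. A minimal sketch of the six-style shim idea behind breezy.sixish (illustrative only; the real module's source is not shown here):

    # Illustrative six-style compatibility shim; not the actual
    # breezy.sixish source.
    import sys

    PY3 = sys.version_info[0] >= 3

    if PY3:
        from io import BytesIO
        zip = zip  # the builtin already returns a lazy iterator on Python 3
    else:
        from cStringIO import StringIO as BytesIO  # bytes-oriented on Python 2
        from itertools import izip as zip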

--- breezy/tests/per_versionedfile.py (old)
+++ breezy/tests/per_versionedfile.py (new)
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2012, 2016 Canonical Ltd
 #
 # Authors:
 #   Johan Rydberg <jrydberg@gnu.org>
@@ -21,149 +21,59 @@
 # TODO: might be nice to create a versionedfile with some type of corruption
 # considered typical and check that it can be detected/corrected.
 
-from itertools import chain, izip
-from StringIO import StringIO
+from gzip import GzipFile
+import itertools
 
-from bzrlib import (
+from .. import (
     errors,
     graph as _mod_graph,
-    groupcompress,
-    knit as _mod_knit,
     osutils,
     progress,
+    transport,
     ui,
     )
-from bzrlib.errors import (
-                           RevisionNotPresent,
-                           RevisionAlreadyPresent,
-                           WeaveParentMismatch
-                           )
-from bzrlib.knit import (
+from ..bzr import (
+    groupcompress,
+    knit as _mod_knit,
+    )
+from ..errors import (
+    RevisionNotPresent,
+    RevisionAlreadyPresent,
+    )
+from ..bzr.knit import (
     cleanup_pack_knit,
     make_file_factory,
     make_pack_factory,
-    KnitAnnotateFactory,
-    KnitPlainFactory,
-    )
-from bzrlib.tests import (
+    )
+from ..sixish import (
+    BytesIO,
+    zip,
+    )
+from . import (
     TestCase,
     TestCaseWithMemoryTransport,
     TestNotApplicable,
     TestSkipped,
-    condition_isinstance,
-    split_suite_by_condition,
-    multiply_tests,
     )
-from bzrlib.tests.http_utils import TestCaseWithWebserver
-from bzrlib.trace import mutter
-from bzrlib.transport import get_transport
-from bzrlib.transport.memory import MemoryTransport
-from bzrlib.tsort import topo_sort
-from bzrlib.tuned_gzip import GzipFile
-import bzrlib.versionedfile as versionedfile
-from bzrlib.versionedfile import (
+from .http_utils import TestCaseWithWebserver
+from ..transport.memory import MemoryTransport
+from ..bzr import versionedfile as versionedfile
+from ..bzr.versionedfile import (
     ConstantMapper,
     HashEscapedPrefixMapper,
     PrefixMapper,
     VirtualVersionedFiles,
     make_versioned_files_factory,
     )
-from bzrlib.weave import WeaveFile
-from bzrlib.weavefile import read_weave, write_weave
-
-
-def load_tests(standard_tests, module, loader):
-    """Parameterize VersionedFiles tests for different implementations."""
-    to_adapt, result = split_suite_by_condition(
-        standard_tests, condition_isinstance(TestVersionedFiles))
-    # We want to be sure of behaviour for:
-    # weaves prefix layout (weave texts)
-    # individually named weaves (weave inventories)
-    # annotated knits - prefix|hash|hash-escape layout, we test the third only
-    #                   as it is the most complex mapper.
-    # individually named knits
-    # individual no-graph knits in packs (signatures)
-    # individual graph knits in packs (inventories)
-    # individual graph nocompression knits in packs (revisions)
-    # plain text knits in packs (texts)
-    len_one_scenarios = [
-        ('weave-named', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                ConstantMapper('inventory')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-knit', {
-            'cleanup':None,
-            'factory':make_file_factory(False, ConstantMapper('revisions')),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-nograph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(False, False, 1),
-            'graph':False,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('named-graph-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': True,
-            }),
-        ('named-graph-nodelta-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, False, 1),
-            'graph':True,
-            'key_length':1,
-            'support_partial_insertion': False,
-            }),
-        ('groupcompress-nograph', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(False, False, 1),
-            'graph': False,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    len_two_scenarios = [
-        ('weave-prefix', {
-            'cleanup':None,
-            'factory':make_versioned_files_factory(WeaveFile,
-                PrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('annotated-knit-escape', {
-            'cleanup':None,
-            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': False,
-            }),
-        ('plain-knit-pack', {
-            'cleanup':cleanup_pack_knit,
-            'factory':make_pack_factory(True, True, 2),
-            'graph':True,
-            'key_length':2,
-            'support_partial_insertion': True,
-            }),
-        ('groupcompress', {
-            'cleanup':groupcompress.cleanup_pack_group,
-            'factory':groupcompress.make_pack_factory(True, False, 1),
-            'graph': True,
-            'key_length':1,
-            'support_partial_insertion':False,
-            }),
-        ]
-    scenarios = len_one_scenarios + len_two_scenarios
-    return multiply_tests(to_adapt, scenarios, result)
+from ..bzr.weave import (
+    WeaveFile,
+    WeaveInvalidChecksum,
+    )
+from ..bzr.weavefile import write_weave
+from .scenarios import load_tests_apply_scenarios
+
+
+load_tests = load_tests_apply_scenarios
 
 
 def get_diamond_vf(f, trailing_eol=True, left_only=False):
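
The deleted load_tests hook above multiplied TestVersionedFiles by hand with split_suite_by_condition and multiply_tests; the replacement assigns load_tests = load_tests_apply_scenarios and moves the scenario list onto the test class itself (see the TestVersionedFiles hunk further down). A rough sketch of what scenario multiplication does, assuming testscenarios-style semantics; the helper and class names here are illustrative, not the actual breezy.tests.scenarios implementation:

    import unittest

    def multiply_by_scenarios(test, scenarios):
        """Yield one clone of `test` per (name, attributes) scenario."""
        for name, attrs in scenarios:
            clone = test.__class__(test._testMethodName)
            for key, value in attrs.items():
                setattr(clone, key, value)  # e.g. factory, graph, key_length
            yield clone

    class Demo(unittest.TestCase):
        scenarios = [('one', {'key_length': 1}), ('two', {'key_length': 2})]

        def test_key_length(self):
            self.assertIn(self.key_length, (1, 2))

    suite = unittest.TestSuite(
        multiply_by_scenarios(Demo('test_key_length'), Demo.scenarios))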
@@ -280,9 +190,9 @@
             versions = f.versions()
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_text('r0'), 'a\nb\n')
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_text('r0'), 'a\nb\n')
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
             self.assertEqual(2, len(f))
             self.assertEqual(2, f.num_versions())
 
@@ -314,16 +224,16 @@
             self.assertTrue('r0' in versions)
             self.assertTrue('r1' in versions)
             self.assertTrue('r2' in versions)
-            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
-            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
+            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
+            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
             self.assertEqual(3, f.num_versions())
             origins = f.annotate('r1')
-            self.assertEquals(origins[0][0], 'r0')
-            self.assertEquals(origins[1][0], 'r1')
+            self.assertEqual(origins[0][0], 'r0')
+            self.assertEqual(origins[1][0], 'r1')
             origins = f.annotate('r2')
-            self.assertEquals(origins[0][0], 'r1')
-            self.assertEquals(origins[1][0], 'r2')
+            self.assertEqual(origins[0][0], 'r1')
+            self.assertEqual(origins[1][0], 'r2')
 
         verify_file(f)
         f = self.reopen_file()
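
assertEquals is just a long-deprecated alias of assertEqual in unittest (it emits a DeprecationWarning on recent Pythons and was removed in Python 3.12), so these renames are mechanical and behaviour-preserving:

    import unittest

    class AliasDemo(unittest.TestCase):
        def test_sum(self):
            self.assertEqual(1 + 1, 2)   # canonical name
            # self.assertEquals(1 + 1, 2) would warn, and fails on 3.12+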
@@ -503,7 +413,7 @@
 
     def test_add_lines_with_matching_blocks_noeol_last_line(self):
         """Add a text with an unchanged last line with no eol should work."""
-        from bzrlib import multiparent
+        from breezy import multiparent
         # Hand verified sha1 of the text we're adding.
         sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
         # Create a mpdiff which adds a new line before the trailing line, and
@@ -524,7 +434,7 @@
         self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
 
     def test_make_mpdiffs(self):
-        from bzrlib import multiparent
+        from breezy import multiparent
         vf = self.get_file('foo')
         sha1s = self._setup_for_deltas(vf)
         new_vf = self.get_file('bar')
@@ -693,8 +603,8 @@
         f.add_lines('r0', [], ['a\n', 'b\n'])
         f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
         origins = f.annotate('r1')
-        self.assertEquals(origins[0][0], 'r1')
-        self.assertEquals(origins[1][0], 'r0')
+        self.assertEqual(origins[0][0], 'r1')
+        self.assertEqual(origins[1][0], 'r0')
 
         self.assertRaises(RevisionNotPresent,
             f.annotate, 'foo')
@@ -709,16 +619,16 @@
         w = self.get_file_corrupted_text()
 
         self.assertEqual('hello\n', w.get_text('v1'))
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
+        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.check)
 
         w = self.get_file_corrupted_checksum()
 
         self.assertEqual('hello\n', w.get_text('v1'))
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
-        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
+        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.check)
 
     def get_file_corrupted_text(self):
         """Return a versioned file with corrupt text but valid metadata."""
@@ -844,15 +754,15 @@
                                  ['base', 'a_ghost'],
                                  ['line\n', 'line_b\n', 'line_c\n'])
         origins = vf.annotate('references_ghost')
-        self.assertEquals(('base', 'line\n'), origins[0])
-        self.assertEquals(('base', 'line_b\n'), origins[1])
-        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
+        self.assertEqual(('base', 'line\n'), origins[0])
+        self.assertEqual(('base', 'line_b\n'), origins[1])
+        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
 
     def test_readonly_mode(self):
-        transport = get_transport(self.get_url('.'))
+        t = self.get_transport()
         factory = self.get_factory()
-        vf = factory('id', transport, 0777, create=True, access_mode='w')
-        vf = factory('id', transport, access_mode='r')
+        vf = factory('id', t, 0o777, create=True, access_mode='w')
+        vf = factory('id', t, access_mode='r')
         self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
         self.assertRaises(errors.ReadOnlyError,
                           vf.add_lines_with_ghosts,
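
Two changes meet in this hunk: the test now asks the test case for its transport rather than calling the removed get_transport() helper, and the bare octal literal 0777 becomes 0o777. Python 3 only accepts the 0o prefix, which Python 2.6+ also understands:

    # Octal literals across Python versions:
    mode = 0o777          # valid on Python 2.6+ and all Python 3
    assert mode == 511    # 7*64 + 7*8 + 7
    # mode = 0777         # SyntaxError on Python 3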
@@ -880,12 +790,14 @@
 class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_file_corrupted_text(self):
-        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        w = WeaveFile('foo', self.get_transport(),
+                      create=True,
+                      get_scope=self.get_transaction)
         w.add_lines('v1', [], ['hello\n'])
         w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
 
@@ -919,14 +831,15 @@
         return w
 
     def reopen_file(self, name='foo', create=False):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
-            get_scope=self.get_transaction)
+        return WeaveFile(name, self.get_transport(),
+                         create=create,
+                         get_scope=self.get_transaction)
 
     def test_no_implicit_create(self):
         self.assertRaises(errors.NoSuchFile,
                           WeaveFile,
                           'foo',
-                          get_transport(self.get_url('.')),
+                          self.get_transport(),
                           get_scope=self.get_transaction)
 
     def get_factory(self):
@@ -936,7 +849,7 @@
 class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
 
     def setUp(self):
-        TestCaseWithMemoryTransport.setUp(self)
+        super(TestPlanMergeVersionedFile, self).setUp()
         mapper = PrefixMapper()
         factory = make_file_factory(True, mapper)
         self.vf1 = factory(self.get_transport('root-1'))
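
Calling super(...).setUp() instead of naming the base class keeps the setUp chain correct if the inheritance changes, and cooperates with multiple inheritance in test mixins; a small sketch:

    class BaseCase(object):
        def setUp(self):
            self.calls = ['BaseCase']

    class DerivedCase(BaseCase):
        def setUp(self):
            # Follows the MRO; survives a rename of BaseCase and plays
            # nicely when mixins are added to the hierarchy.
            super(DerivedCase, self).setUp()
            self.calls.append('DerivedCase')

    case = DerivedCase()
    case.setUp()
    assert case.calls == ['BaseCase', 'DerivedCase']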
@@ -982,8 +895,8 @@
     def test_get_record_stream(self):
         self.setup_abcde()
         def get_record(suffix):
-            return self.plan_merge_vf.get_record_stream(
-                [('root', suffix)], 'unordered', True).next()
+            return next(self.plan_merge_vf.get_record_stream(
+                [('root', suffix)], 'unordered', True))
         self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
         self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
         self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
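
Iterator objects lost their .next() method on Python 3 (it became __next__); the next() builtin, available since Python 2.6, spells the same operation portably:

    it = iter(['a', 'b'])
    assert next(it) == 'a'   # works on Python 2.6+ and 3
    # it.next() is Python 2 only; on Python 3 the method is it.__next__()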
@@ -999,13 +912,20 @@
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # try an empty file access
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
+
+    def test_readonly_http_works_with_feeling(self):
+        # we should be able to read from http with a versioned file.
+        vf = self.get_file()
         # now with feeling.
         vf.add_lines('1', [], ['a\n'])
         vf.add_lines('2', ['1'], ['b\n', 'a\n'])
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo',
+            transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual(['1', '2'], vf.versions())
+        self.assertEqual(['1', '2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
 
@@ -1013,8 +933,9 @@
 class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
 
     def get_file(self):
-        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
-            get_scope=self.get_transaction)
+        return WeaveFile('foo', self.get_transport(),
+                         create=True,
+                         get_scope=self.get_transaction)
 
     def get_factory(self):
         return WeaveFile
@@ -1023,16 +944,15 @@
 class MergeCasesMixin(object):
 
     def doMerge(self, base, a, b, mp):
-        from cStringIO import StringIO
         from textwrap import dedent
 
         def addcrlf(x):
             return x + '\n'
 
         w = self.get_file()
-        w.add_lines('text0', [], map(addcrlf, base))
-        w.add_lines('text1', ['text0'], map(addcrlf, a))
-        w.add_lines('text2', ['text0'], map(addcrlf, b))
+        w.add_lines('text0', [], list(map(addcrlf, base)))
+        w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
+        w.add_lines('text2', ['text0'], list(map(addcrlf, b)))
 
         self.log_contents(w)
 
@@ -1043,12 +963,12 @@
                 self.log('%12s | %s' % (state, line[:-1]))
 
         self.log('merge:')
-        mt = StringIO()
+        mt = BytesIO()
         mt.writelines(w.weave_merge(p))
         mt.seek(0)
         self.log(mt.getvalue())
 
-        mp = map(addcrlf, mp)
+        mp = list(map(addcrlf, mp))
         self.assertEqual(mt.readlines(), mp)
 
 
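map() returns a lazy iterator on Python 3 rather than a list, so call sites that hand the result to APIs expecting a sequence, or that read it more than once, wrap it in list(); likewise the merge output is byte-oriented, hence BytesIO rather than StringIO:

    def addcrlf(x):
        return x + '\n'

    lines = list(map(addcrlf, ['a', 'b']))  # materialise: map() is lazy on Python 3
    assert lines == ['a\n', 'b\n']          # a bare map object would compare unequal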
@@ -1264,11 +1184,12 @@
 class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
 
     def get_file(self, name='foo'):
-        return WeaveFile(name, get_transport(self.get_url('.')), create=True)
+        return WeaveFile(name, self.get_transport(),
+                         create=True)
 
     def log_contents(self, w):
         self.log('weave is:')
-        tmpf = StringIO()
+        tmpf = BytesIO()
         write_weave(w, tmpf)
         self.log(tmpf.getvalue())
 
@@ -1309,11 +1230,11 @@
         """Grab the interested adapted texts for tests."""
         # origin is a fulltext
         entries = f.get_record_stream([('origin',)], 'unordered', False)
-        base = entries.next()
+        base = next(entries)
         ft_data = ft_adapter.get_bytes(base)
         # merged is both a delta and multiple parents.
         entries = f.get_record_stream([('merged',)], 'unordered', False)
-        merged = entries.next()
+        merged = next(entries)
         delta_data = delta_adapter.get_bytes(merged)
         return ft_data, delta_data
 
@@ -1329,11 +1250,11 @@
             'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
             'origin\n'
             'end origin\n',
-            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
             'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
             '1,2,3\nleft\nright\nmerged\nend merged\n',
-            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_deannotation(self):
         """Test converting annotated knits to unannotated knits."""
@@ -1347,11 +1268,11 @@
             'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
             'origin\n'
             'end origin\n',
-            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
             'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
             '2,2,2\nright\nmerged\nend merged\n',
-            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
+            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_annotated_to_fulltext_no_eol(self):
         """Test adapting annotated knits to full texts (for -> weaves)."""
@@ -1463,6 +1384,95 @@
 class TestVersionedFiles(TestCaseWithMemoryTransport):
     """Tests for the multiple-file variant of VersionedFile."""
 
+    # We want to be sure of behaviour for:
+    # weaves prefix layout (weave texts)
+    # individually named weaves (weave inventories)
+    # annotated knits - prefix|hash|hash-escape layout, we test the third only
+    #                   as it is the most complex mapper.
+    # individually named knits
+    # individual no-graph knits in packs (signatures)
+    # individual graph knits in packs (inventories)
+    # individual graph nocompression knits in packs (revisions)
+    # plain text knits in packs (texts)
+    len_one_scenarios = [
+        ('weave-named', {
+            'cleanup':None,
+            'factory':make_versioned_files_factory(WeaveFile,
+                ConstantMapper('inventory')),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-knit', {
+            'cleanup':None,
+            'factory':make_file_factory(False, ConstantMapper('revisions')),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-nograph-nodelta-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(False, False, 1),
+            'graph':False,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('named-graph-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, True, 1),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': True,
+            }),
+        ('named-graph-nodelta-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, False, 1),
+            'graph':True,
+            'key_length':1,
+            'support_partial_insertion': False,
+            }),
+        ('groupcompress-nograph', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(False, False, 1),
+            'graph': False,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
+        ]
+    len_two_scenarios = [
+        ('weave-prefix', {
+            'cleanup':None,
+            'factory':make_versioned_files_factory(WeaveFile,
+                PrefixMapper()),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': False,
+            }),
+        ('annotated-knit-escape', {
+            'cleanup':None,
+            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': False,
+            }),
+        ('plain-knit-pack', {
+            'cleanup':cleanup_pack_knit,
+            'factory':make_pack_factory(True, True, 2),
+            'graph':True,
+            'key_length':2,
+            'support_partial_insertion': True,
+            }),
+        ('groupcompress', {
+            'cleanup':groupcompress.cleanup_pack_group,
+            'factory':groupcompress.make_pack_factory(True, False, 1),
+            'graph': True,
+            'key_length':1,
+            'support_partial_insertion':False,
+            }),
+        ]
+
+    scenarios = len_one_scenarios + len_two_scenarios
+
     def get_versionedfiles(self, relpath='files'):
         transport = self.get_transport(relpath)
         if relpath != '.':
@@ -1479,6 +1489,18 @@
         else:
            return ('FileA',) + (suffix,)
 
+    def test_add_fallback_implies_without_fallbacks(self):
+        f = self.get_versionedfiles('files')
+        if getattr(f, 'add_fallback_versioned_files', None) is None:
+            raise TestNotApplicable("%s doesn't support fallbacks"
+                                    % (f.__class__.__name__,))
+        g = self.get_versionedfiles('fallback')
+        key_a = self.get_simple_key('a')
+        g.add_lines(key_a, [], ['\n'])
+        f.add_fallback_versioned_files(g)
+        self.assertTrue(key_a in f.get_parent_map([key_a]))
+        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
+
     def test_add_lines(self):
         f = self.get_versionedfiles()
         key0 = self.get_simple_key('r0')
@@ -1620,7 +1642,7 @@
                 vf._add_text, new_key, [], ''.join(lines),
                 nostore_sha=sha)
             # and no new version should have been added.
-            record = vf.get_record_stream([new_key], 'unordered', True).next()
+            record = next(vf.get_record_stream([new_key], 'unordered', True))
             self.assertEqual('absent', record.storage_kind)
 
     def test_add_lines_nostoresha(self):
@@ -1678,13 +1700,13 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
-            self.assertEqual(set([
+            self.assertEqual({
                 ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                 ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                 ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                 ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                 ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
-                ]),
+                },
                 files.keys())
         elif self.key_length == 2:
             self.assertEqual([
@@ -1700,7 +1722,7 @@
                 ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
-            self.assertEqual(set([
+            self.assertEqual({
                 ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                 ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                 ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
@@ -1711,7 +1733,7 @@
                 ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                 ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                 ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
-                ]),
+                },
                 files.keys())
 
     def test_empty_lines(self):
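
set([...]) becomes a set literal; {...} (available since Python 2.7 and in all Python 3) skips building the intermediate list and reads closer to the intent:

    keys = {('left',), ('right',)}           # set literal, no throwaway list
    assert keys == set([('left',), ('right',)])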
@@ -1985,7 +2007,7 @@
         key = self.get_simple_key('foo')
         files.add_lines(key, (), ['my text\n', 'content'])
         stream = files.get_record_stream([key], 'unordered', False)
-        record = stream.next()
+        record = next(stream)
         if record.storage_kind in ('chunked', 'fulltext'):
             # chunked and fulltext representations are for direct use not wire
             # serialisation: check they are able to be used directly. To send
@@ -2012,9 +2034,9 @@
         :param records: A list to collect the seen records.
         :return: A generator of the records in stream.
         """
-        # We make assertions during copying to catch things early for
-        # easier debugging.
-        for record, ref_record in izip(stream, expected):
+        # We make assertions during copying to catch things early for easier
+        # debugging. This must use the iterating zip() from the future.
+        for record, ref_record in zip(stream, expected):
             records.append(record)
             self.assertEqual(ref_record.key, record.key)
             self.assertEqual(ref_record.storage_kind, record.storage_kind)
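
The comment in the new code is the point: pairing a record stream against expected records should consume the stream incrementally, not build full lists first, so it needs the lazy zip() (breezy.sixish maps it to itertools.izip on Python 2). A small illustration:

    stream = iter(['r1', 'r2'])
    expected = ['r1', 'r2']
    # Lazy pairing pulls stream items one at a time, in step with expected.
    for record, ref in zip(stream, expected):   # izip on Python 2 via sixish
        assert record == ref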
@@ -2428,7 +2450,7 @@
         origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
         end_entries = source.get_record_stream(end_keys, 'topological', False)
         start_entries = source.get_record_stream(start_keys, 'topological', False)
-        entries = chain(origin_entries, end_entries, start_entries)
+        entries = itertools.chain(origin_entries, end_entries, start_entries)
         try:
             files.insert_record_stream(entries)
         except RevisionNotPresent:
@@ -2460,7 +2482,7 @@
         streams = []
         for key in reversed(keys):
             streams.append(source.get_record_stream([key], 'unordered', False))
-        deltas = chain(*streams[:-1])
+        deltas = itertools.chain.from_iterable(streams[:-1])
         files = self.get_versionedfiles()
         try:
             files.insert_record_stream(deltas)
@@ -2500,7 +2522,7 @@
                 list(files.get_missing_compression_parent_keys()))
             files.insert_record_stream(entries)
             missing_bases = files.get_missing_compression_parent_keys()
-            self.assertEqual(set([self.get_simple_key('left')]),
+            self.assertEqual({self.get_simple_key('left')},
                 set(missing_bases))
             self.assertEqual(set(keys), set(files.get_parent_map(keys)))
         else:
@@ -2524,7 +2546,7 @@
         files = self.get_versionedfiles()
         files.insert_record_stream(entries)
         missing_bases = files.get_missing_compression_parent_keys()
-        self.assertEqual(set([self.get_simple_key('left')]),
+        self.assertEqual({self.get_simple_key('left')},
             set(missing_bases))
         # 'merged' is inserted (although a commit of a write group involving
         # this versionedfiles would fail).
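
chain(*streams[:-1]) unpacks the list of streams into positional arguments; itertools.chain.from_iterable takes the iterable itself, which also works when the collection of streams is itself lazy:

    import itertools

    streams = [iter('ab'), iter('cd')]
    merged = itertools.chain.from_iterable(streams)  # no argument unpacking needed
    assert list(merged) == ['a', 'b', 'c', 'd']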
@@ -2615,7 +2637,7 @@
             lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
 
     def test_make_mpdiffs(self):
-        from bzrlib import multiparent
+        from breezy import multiparent
         files = self.get_versionedfiles('source')
         # add texts that should trip the knit maximum delta chain threshold
         # as well as doing parallel chains of data in knits.
@@ -2717,7 +2739,7 @@
         else:
             key = ('foo', 'bar',)
         files.add_lines(key, (), [])
-        self.assertEqual(set([key]), set(files.keys()))
+        self.assertEqual({key}, set(files.keys()))
 
 
 class VirtualVersionedFilesTests(TestCase):
@@ -2731,7 +2753,7 @@
         return ret
 
     def setUp(self):
-        TestCase.setUp(self)
+        super(VirtualVersionedFilesTests, self).setUp()
         self._lines = {}
         self._parent_map = {}
         self.texts = VirtualVersionedFiles(self._get_parent_map,
@@ -2753,36 +2775,36 @@
                           [])
 
     def test_get_sha1s_nonexistent(self):
-        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))
+        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))
 
     def test_get_sha1s(self):
         self._lines["key"] = ["dataline1", "dataline2"]
-        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
+        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
                            self.texts.get_sha1s([("key",)]))
 
     def test_get_parent_map(self):
         self._parent_map = {"G": ("A", "B")}
-        self.assertEquals({("G",): (("A",),("B",))},
+        self.assertEqual({("G",): (("A",),("B",))},
                           self.texts.get_parent_map([("G",), ("L",)]))
 
     def test_get_record_stream(self):
         self._lines["A"] = ["FOO", "BAR"]
         it = self.texts.get_record_stream([("A",)], "unordered", True)
-        record = it.next()
-        self.assertEquals("chunked", record.storage_kind)
-        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
-        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))
+        record = next(it)
+        self.assertEqual("chunked", record.storage_kind)
+        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
+        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
 
     def test_get_record_stream_absent(self):
         it = self.texts.get_record_stream([("A",)], "unordered", True)
-        record = it.next()
-        self.assertEquals("absent", record.storage_kind)
+        record = next(it)
+        self.assertEqual("absent", record.storage_kind)
 
     def test_iter_lines_added_or_present_in_keys(self):
         self._lines["A"] = ["FOO", "BAR"]
         self._lines["B"] = ["HEY"]
         self._lines["C"] = ["Alberta"]
         it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
-        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
+        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
             sorted(list(it)))