/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
6614.1.3 by Vincent Ladeuil
Fix assertEquals being deprecated by using assertEqual.
1
# Copyright (C) 2006-2012, 2016 Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
23
5590.1.1 by John Arbash Meinel
Stop using tuned_gzip, it seems to give incorrect results on python 2.7
24
from gzip import GzipFile
6631.2.3 by Martin
Fix per_versionedfile test failures and rethink future_builtins
25
import itertools
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
26
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
27
from .. import (
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
28
    errors,
4593.5.35 by John Arbash Meinel
Start working on a per-vf implementation test of find_ancestry.
29
    graph as _mod_graph,
3735.31.1 by John Arbash Meinel
Bring the groupcompress plugin into the brisbane-core branch.
30
    groupcompress,
4241.4.1 by Ian Clatworthy
add sha generation support to versionedfiles
31
    knit as _mod_knit,
2309.4.7 by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode,
32
    osutils,
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
33
    progress,
5273.1.7 by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through
34
    transport,
4332.3.26 by Robert Collins
Allow passing keys to check to VersionedFile.check().
35
    ui,
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
36
    )
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
37
from ..errors import (
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
38
                           RevisionNotPresent,
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
39
                           RevisionAlreadyPresent,
40
                           )
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
41
from ..knit import (
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
42
    cleanup_pack_knit,
43
    make_file_factory,
44
    make_pack_factory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
45
    )
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
46
from ..sixish import (
6621.22.2 by Martin
Use BytesIO or StringIO from bzrlib.sixish
47
    BytesIO,
6631.2.3 by Martin
Fix per_versionedfile test failures and rethink future_builtins
48
    zip,
6621.22.2 by Martin
Use BytesIO or StringIO from bzrlib.sixish
49
    )
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
50
from . import (
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
51
    TestCase,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
52
    TestCaseWithMemoryTransport,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
53
    TestNotApplicable,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
54
    TestSkipped,
55
    )
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
56
from .http_utils import TestCaseWithWebserver
57
from ..transport.memory import MemoryTransport
58
from .. import versionedfile as versionedfile
59
from ..versionedfile import (
3350.6.2 by Robert Collins
Prepare parameterised test environment.
60
    ConstantMapper,
61
    HashEscapedPrefixMapper,
62
    PrefixMapper,
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
63
    VirtualVersionedFiles,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
64
    make_versioned_files_factory,
65
    )
6624 by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes')
66
from ..weave import WeaveFile
67
from ..weavefile import write_weave
68
from .scenarios import load_tests_apply_scenarios
5559.2.2 by Martin Pool
Change to using standard load_tests_apply_scenarios.
69
70
71
# Parameterise the tests in this module using the standard scenario machinery.
load_tests = load_tests_apply_scenarios
3350.6.2 by Robert Collins
Prepare parameterised test environment.
72
73
3350.3.11 by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error.
74
def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Populate the versioned file *f* with a five-node diamond graph.

    The graph is origin -> base -> (left, right) -> merged, which exercises
    deltas and merges.

    :param f: The versioned file to populate via add_lines.
    :param trailing_eol: If True end the last line with \n.
    :param left_only: If True, do not add the 'right' and 'merged' nodes.
    :return: A tuple (f, parents) where parents maps each version id to the
        parent keys recorded for it.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # The final line of each text optionally lacks its newline so that
    # no-eol handling is exercised too.
    last_char = '\n' if trailing_eol else ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'], ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
                    ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
100
101
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
102
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
                      nograph=False, nokeys=False):
    """Populate the VersionedFiles *files* with diamond ancestry graphs.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None is as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    prefixes = [()] if key_length == 1 else [('FileA',), ('FileB',)]
    last_char = '\n' if trailing_eol else ''
    result = []

    def get_parents(suffix_list):
        # Graphless stores receive no parent information at all.
        if nograph:
            return ()
        return [prefix + suffix for suffix in suffix_list]

    def get_key(suffix):
        # With nokeys the store is asked to pick the key itself.
        return (None,) if nokeys else (suffix,)

    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
                                      ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
                                      get_parents([('origin',)]),
                                      ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
                                      get_parents([('base',)]),
                                      ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                                          get_parents([('base',)]),
                                          ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(
                prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
165
166
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
167
class VersionedFileTestMixIn(object):
168
    """A mixin test class for testing VersionedFiles.
169
170
    This is not an adaptor-style test at this point because
171
    theres no dynamic substitution of versioned file implementations,
172
    they are strictly controlled by their owning repositories.
173
    """
174
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
175
    def get_transaction(self):
176
        if not hasattr(self, '_transaction'):
177
            self._transaction = None
178
        return self._transaction
179
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
180
    def test_add(self):
        """Added versions are retrievable, both before and after a reopen."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])

        def check_contents(f):
            versions = f.versions()
            self.assertIn('r0', versions)
            self.assertIn('r1', versions)
            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEqual(f.get_text('r0'), 'a\nb\n')
            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())
            # Unknown parents and duplicate version ids must be rejected.
            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])

        check_contents(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        check_contents(f)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
202
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
203
    def test_adds_with_parent_texts(self):
        """parent_texts memoises texts across successive add_lines calls."""
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # The format does not support ghosts; fall back to a normal add.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])

        def check_contents(f):
            versions = f.versions()
            for version in ('r0', 'r1', 'r2'):
                self.assertIn(version, versions)
            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            # Annotation must attribute each line to the version that
            # introduced it.
            origins = f.annotate('r1')
            self.assertEqual(origins[0][0], 'r0')
            self.assertEqual(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEqual(origins[0][0], 'r1')
            self.assertEqual(origins[1][0], 'r2')

        check_contents(f)
        f = self.reopen_file()
        check_contents(f)
236
2805.6.7 by Robert Collins
Review feedback.
237
    def test_add_unicode_content(self):
        """Unicode content is rejected: versioned files store bytes only."""
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        # add_lines_with_ghosts is optional, so NotImplementedError is also
        # an acceptable outcome.
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
246
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
247
    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        # Two different (but both valid) matching-block hints must yield the
        # same reconstructed text.
        for version, blocks in (('2', [(0, 0, 1), (1, 3, 0)]),
                                ('3', [(0, 2, 1), (1, 3, 0)])):
            vf.add_lines(version, ['1'], ['a\n', 'a\n', 'a\n'],
                         left_matching_blocks=blocks)
            self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines(version))
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
263
2805.6.7 by Robert Collins
Review feedback.
264
    def test_inline_newline_throws(self):
        """A newline embedded inside a supplied line is rejected."""
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            # ghost support is optional for this interface
            pass
278
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
279
    def test_add_reserved(self):
        """Adding a version whose id uses the reserved 'x:' form raises."""
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
283
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
284
    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        texts = (empty_text, sample_text_nl, sample_text_no_nl)
        shas = []
        for version, lines in texts:
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(shas, texts):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
303
2803.1.1 by Robert Collins
Fix typo in ghosts version of test_add_lines_nostoresha.
304
    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        texts = (empty_text, sample_text_nl, sample_text_no_nl)
        shas = []
        for version, lines in texts:
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(shas, texts):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
328
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
329
    def test_add_lines_return_value(self):
        """add_lines returns (sha1, text size, ...) regardless of parents."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            expected = (osutils.sha_strings(lines), sum(map(len, lines)))
            self.assertEqual(expected, result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
348
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
349
    def test_get_reserved(self):
        """Reading via a reserved ('x:') version id raises ReservedId."""
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
354
3468.2.4 by Martin Pool
Test and fix #234748 problems in trailing newline diffs
355
    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            parents = []
            for step in range(length):
                version = 'step-%d' % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
379
380
    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            # Alternate between eol and no-eol texts along the chain.
            lines = sample_text_nl if i % 2 else sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
408
3460.2.1 by Robert Collins
* Inserting a bundle which changes the contents of a file with no trailing
409
    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Hand verified sha1 of the text being added:
        #   6a1d115ec7b60afb664dc14890b5af5ce3c827a4
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
430
2520.4.85 by Aaron Bentley
Get all test passing (which just proves there aren't enough tests!)
431
    def test_make_mpdiffs(self):
        """Texts round-trip through make_mpdiffs/add_mpdiffs unchanged."""
        from breezy import multiparent
        source = self.get_file('foo')
        sha1s = self._setup_for_deltas(source)
        target = self.get_file('bar')
        # Apply each version's mpdiff to the target in topological order and
        # verify the reconstructed text matches the source.
        for version in multiparent.topo_iter(source):
            diff = source.make_mpdiffs([version])[0]
            parents = source.get_parent_map([version])[version]
            sha1 = source.get_sha1s([version])[version]
            target.add_mpdiffs([(version, parents, sha1, diff)])
            self.assertEqualDiff(source.get_text(version),
                                 target.get_text(version))
442
3453.3.2 by John Arbash Meinel
Add a test case for the first loop, unable to find a way to trigger the second loop
443
    def test_make_mpdiffs_with_ghosts(self):
        """make_mpdiffs raises RevisionNotPresent for a ghost version."""
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
451
1596.2.38 by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready.
452
    def _setup_for_deltas(self, f):
        """Populate versioned file *f* with delta-stressing texts.

        Builds assorted end-of-line edge cases plus two parallel 26-deep
        insertion chains (enough to trip the knit maximum delta chain
        threshold).  Returns a dict mapping chain depth -> expected sha1
        of the chain1 text at that depth.
        """
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        # expected sha1 of the chain1 text at each depth
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        # first chain: each version adds one line on top of its predecessor
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        # second, parallel chain rooted at the same base
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s
1596.2.37 by Robert Collins
Switch to delta based content copying in the generic versioned file copier.
521
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
522
    def test_ancestry(self):
        """get_ancestry returns a topologically consistent version list.

        Only relative ordering is guaranteed, so indexes are compared
        rather than the full list; unrelated versions (r3) are excluded.
        """
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM r3
        # r0 r1 r2 r3 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        # r3 is not an ancestor of rM and must be excluded
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        # unsorted ancestry must contain the same versions as sorted
        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
552
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
553
    def test_mutate_after_finish(self):
        """Mutating APIs raise OutSideTransaction once the transaction ends."""
        self._transaction = 'before'
        f = self.get_file()
        # simulate the transaction the file was opened under finishing
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
559
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
560
    def test_copy_to(self):
        """copy_to writes every per-format suffix file to the transport."""
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))
567
568
    def test_get_suffixes(self):
        """get_suffixes returns a list of filename suffixes."""
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
572
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
573
    def test_get_parent_map(self):
        """get_parent_map returns parent tuples, silently omitting absent keys."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        # unknown versions are simply left out of the result, not errors
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))
595
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
596
    def test_annotate(self):
        """annotate attributes each line to the version that introduced it."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        # 'c\n' is new in r1; 'b\n' is inherited from r0
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')
606
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
607
    def test_detection(self):
        """Corrupt texts and checksums are detected on extraction and check.

        Weaves store a checksum per text; extracting a text must verify
        it, and a full check() must fail for either kind of corruption.
        """
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        # v1 is untouched; only v2's text was corrupted
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        # here the text is valid but the stored checksum is wrong
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
627
628
    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata.

        Abstract hook: concrete TestCase subclasses must override this.
        """
        raise NotImplementedError(self.get_file_corrupted_text)
631
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
632
    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again.

        Abstract hook: concrete TestCase subclasses must override this.
        """
        raise NotImplementedError(self.reopen_file)
635
1594.2.6 by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved.
636
    def test_iter_lines_added_or_present_in_versions(self):
        """Iterating lines yields at least every line added by the versions,
        and reports progress while walking content."""
        # test that we get at least an equalset of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):
            # records every update() call so the test can assert on them

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add a ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add a ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            # only assert on progress if the implementation reported any
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
1594.2.7 by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc.
698
1594.2.8 by Robert Collins
add ghost aware apis to knits.
699
    def test_add_lines_with_ghosts(self):
        """Ghost parents are hidden from plain APIs but visible via _with_ghosts,
        and become real parents once the ghost revision is actually added."""
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
1594.2.8 by Robert Collins
add ghost aware apis to knits.
736
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
737
    def test_add_lines_with_ghosts_after_normal_revs(self):
        """A revision referencing a ghost still annotates against its real parent."""
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEqual(('base', 'line\n'), origins[0])
        self.assertEqual(('base', 'line_b\n'), origins[1])
        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
1594.2.23 by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files.
755
756
    def test_readonly_mode(self):
        """Files opened with access_mode='r' refuse all mutating calls."""
        t = self.get_transport()
        factory = self.get_factory()
        # create writable, then reopen read-only
        vf = factory('id', t, 0o777, create=True, access_mode='w')
        vf = factory('id', t, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
767
3316.2.9 by Robert Collins
* ``VersionedFile.get_sha1`` is deprecated, please use
768
    def test_get_sha1s(self):
        """get_sha1s hashes text content only; metadata and eol matter, parents don't."""
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
783
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
784
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
785
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
    """Run the VersionedFile interface tests against WeaveFile."""

    def get_file(self, name='foo'):
        """Create a fresh WeaveFile on this test's transport."""
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        """Return a weave whose v2 text is corrupted but metadata is valid."""
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted: flip the case of v2's line so it no longer matches its sha1
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        """Return a weave with valid texts but a corrupted stored sha1 for v2."""
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        """Reopen a WeaveFile from the transport (no implicit create by default)."""
        return WeaveFile(name, self.get_transport(),
                         create=create,
                         get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        """Opening a missing weave without create=True raises NoSuchFile."""
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):
        """The versioned-file factory under test."""
        return WeaveFile
842
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
843
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
844
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
    """Tests for _PlanMergeVersionedFile backed by two fallback stores."""

    def setUp(self):
        super(TestPlanMergeVersionedFile, self).setUp()
        key_mapper = PrefixMapper()
        make_vf = make_file_factory(True, key_mapper)
        self.vf1 = make_vf(self.get_transport('root-1'))
        self.vf2 = make_vf(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.append(self.vf1)
        self.plan_merge_vf.fallback_versionedfiles.append(self.vf2)

    def test_add_lines(self):
        # A key whose suffix ends in ':' is accepted...
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        # ...while a malformed key, or None for parents/lines, is rejected.
        add = self.plan_merge_vf.add_lines
        self.assertRaises(ValueError, add, ('root', 'a'), [], [])
        self.assertRaises(ValueError, add, ('root', 'a:'), None, [])
        self.assertRaises(ValueError, add, ('root', 'a:'), [], None)

    def setup_abcde(self):
        """Build a five-version graph spread across both fallback stores."""
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(
            ('root', 'E:'), [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        pm_vf = self.plan_merge_vf
        self.assertEqual(
            {('root', 'B'): (('root', 'A'),)},
            pm_vf.get_parent_map([('root', 'B')]))
        self.assertEqual(
            {('root', 'D'): (('root', 'C'),)},
            pm_vf.get_parent_map([('root', 'D')]))
        self.assertEqual(
            {('root', 'E:'): (('root', 'B'), ('root', 'D'))},
            pm_vf.get_parent_map([('root', 'E:')]))
        # Unknown keys are simply absent from the result.
        self.assertEqual({}, pm_vf.get_parent_map([('root', 'F')]))
        self.assertEqual(
            {('root', 'B'): (('root', 'A'),),
             ('root', 'D'): (('root', 'C'),),
             ('root', 'E:'): (('root', 'B'), ('root', 'D')),
             },
            pm_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()

        def fetch_one(suffix):
            stream = self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True)
            return next(stream)

        self.assertEqual('a', fetch_one('A').get_bytes_as('fulltext'))
        self.assertEqual('c', fetch_one('C').get_bytes_as('fulltext'))
        self.assertEqual('e', fetch_one('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', fetch_one('F').storage_kind)
1666.1.1 by Robert Collins
Add trivial http-using test for versioned files.
899
900
901
class TestReadonlyHttpMixin(object):
    """Mixin checking that a versioned file is readable over readonly HTTP.

    Concrete subclasses provide get_file() and get_factory().
    """

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # Even an empty versioned file should be readable via http.
        vf = self.get_file()
        url = self.get_readonly_url('.')
        readonly_vf = self.get_factory()(
            'foo', transport.get_transport_from_url(url))
        self.assertEqual([], readonly_vf.versions())

    def test_readonly_http_works_with_feeling(self):
        # Reading actual content over http should also work.
        vf = self.get_file()
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        url = self.get_readonly_url('.')
        readonly_vf = self.get_factory()(
            'foo', transport.get_transport_from_url(url))
        self.assertEqual(['1', '2'], vf.versions())
        self.assertEqual(['1', '2'], readonly_vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
926
927
928
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-http checks against WeaveFile."""

    def get_file(self):
        return WeaveFile(
            'foo', self.get_transport(), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
937
938
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
939
class MergeCasesMixin(object):
    """Shared weave-merge scenarios.

    Concrete subclasses provide get_file(), log_contents() and may
    override overlappedInsertExpected to match their merge output.
    """

    def doMerge(self, base, a, b, mp):
        """Merge two texts derived from a common base and check the result.

        :param base: lines of the common ancestor (without newlines).
        :param a: lines of one derived text.
        :param b: lines of the other derived text.
        :param mp: the expected merged lines, conflict markers included.
        """
        # NOTE: the previous version imported textwrap.dedent here but
        # never used it; the dead import has been removed.
        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], list(map(addcrlf, base)))
        w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
        w.add_lines('text2', ['text0'], list(map(addcrlf, b)))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = BytesIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = list(map(addcrlf, mp))
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    # Expected merge output for testOverlappedInsert; subclasses override.
    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        """Like doMerge, but texts are whole strings compared via diff."""
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
<<<<<<<\x20
            line 2
=======
>>>>>>>\x20
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            int a() {}
=======
            int c() {}
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
1177
1178
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
1179
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the shared merge cases against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        buf = BytesIO()
        write_weave(w, buf)
        self.log(buf.getvalue())

    # Weave merge reports the overlapped insert as a conflict.
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1193
1194
1195
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
    """Tests for the knit record adapters registered in versionedfile."""

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        """Return a knit store on this test's transport.

        :param annotated: if True, build an annotated knit.
        """
        mapper = ConstantMapper('knit')
        # Named knit_transport rather than 'transport' so it does not
        # shadow the bzrlib.transport module used elsewhere in this file.
        knit_transport = self.get_transport()
        return make_file_factory(annotated, mapper)(knit_transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = next(entries)
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = next(entries)
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1341
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
1342
1343
class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        # A ConstantMapper ignores the key entirely and always answers the
        # name it was constructed with.
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        # map() keeps only the first element of the key; unmap() restores
        # it as a one-element tuple.
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        # The prefix gains a two-character hash directory in front of it.
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        # As hash-prefix, but characters unsafe for the store (spaces,
        # upper case) are escaped too, and unmap() round-trips them.
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1377
1378
1379
class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    # Scenarios whose keys have a single element:
    len_one_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    # Scenarios whose keys have two elements (prefix + name):
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]

    scenarios = len_one_scenarios + len_two_scenarios
1470
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1471
    def get_versionedfiles(self, relpath='files'):
        """Build the scenario's VersionedFiles instance at *relpath*.

        Registers the scenario's cleanup callable (if any) so the store is
        torn down when the test finishes.
        """
        t = self.get_transport(relpath)
        if relpath != '.':
            t.mkdir('.')
        versioned_files = self.factory(t)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, versioned_files)
        return versioned_files
1479
4398.8.8 by John Arbash Meinel
Respond to Andrew's review comments.
1480
    def get_simple_key(self, suffix):
        """Return a key for the object under test.

        For length-1 keys the suffix alone forms the key; for longer keys
        it is prefixed with a fixed file id.
        """
        return (suffix,) if self.key_length == 1 else ('FileA', suffix)
1486
5816.8.2 by Andrew Bennetts
Add test for without_fallbacks.
1487
    def test_add_fallback_implies_without_fallbacks(self):
        """A store that accepts fallbacks must also strip them again."""
        files = self.get_versionedfiles('files')
        if getattr(files, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (files.__class__.__name__,))
        fallback = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key('a')
        fallback.add_lines(key_a, [], ['\n'])
        files.add_fallback_versioned_files(fallback)
        # With the fallback attached the key is visible...
        self.assertTrue(key_a in files.get_parent_map([key_a]))
        # ...and without_fallbacks() hides it again.
        self.assertFalse(
            key_a in files.without_fallbacks().get_parent_map([key_a]))
1498
4398.8.1 by John Arbash Meinel
Add a VersionedFile.add_text() api.
1499
    def test_add_lines(self):
        """add_lines stores texts retrievable via get_record_stream."""
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        # Only graph-aware stores record parents.
        parents = [key0] if self.graph else []
        f.add_lines(key1, parents, ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = sorted(
            (record.key, record.get_bytes_as('fulltext'))
            for record in f.get_record_stream([key0, key1], 'unordered', True))
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1518
4398.8.6 by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'.
1519
    def test__add_text(self):
        """_add_text stores fulltext strings retrievable as records."""
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        # Only graph-aware stores record parents.
        parents = [key0] if self.graph else []
        f._add_text(key1, parents, 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = sorted(
            (record.key, record.get_bytes_as('fulltext'))
            for record in f.get_record_stream([key0, key1], 'unordered', True))
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1538
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1539
    def test_annotate(self):
        """annotate() attributes each line to its introducing revision."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        prefix = () if self.key_length == 1 else ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([(prefix + ('origin',), 'origin\n')], origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([(prefix + ('base',), 'base\n')], origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            expected = [
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n'),
                ]
        else:
            # Without a graph everything is new.
            expected = [
                (prefix + ('merged',), line)
                for line in ('base\n', 'left\n', 'right\n', 'merged\n')]
        self.assertEqual(expected, origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))
1577
4332.3.26 by Robert Collins
Allow passing keys to check to VersionedFile.check().
1578
    def test_check_no_parameters(self):
        """check() can be called with no arguments."""
        files = self.get_versionedfiles()
        # Bug fix: actually invoke check(). Previously this test only built
        # the fixture and asserted nothing, so a broken no-argument check()
        # would have gone unnoticed.
        files.check()
1580
1581
    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        files = self.get_versionedfiles()
        progress = ui.ui_factory.nested_progress_bar()
        self.addCleanup(progress.finished)
        files.check(progress_bar=progress)
1587
1588
    def test_check_with_keys_becomes_generator(self):
        """check(keys=...) yields a record stream of fulltexts."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        report = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, report, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)
4454.3.65 by John Arbash Meinel
Tests that VF implementations support .get_annotator()
1599
4744.2.5 by John Arbash Meinel
Change to a generic 'VersionedFiles.clear_cache()' api.
1600
    def test_clear_cache(self):
        """clear_cache() is callable on a fresh store without error."""
        self.get_versionedfiles().clear_cache()
1603
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1604
    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        self.get_versionedfiles()
1607
4241.4.1 by Ian Clatworthy
add sha generation support to versionedfiles
1608
    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        """Insert the standard diamond dataset into *files*.

        Delegates to the module-level helper, filling in this scenario's
        key length and graph support.
        """
        return get_diamond_files(
            files, self.key_length,
            trailing_eol=trailing_eol,
            nograph=not self.graph,
            left_only=left_only,
            nokeys=nokeys)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1613
4398.8.4 by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles
1614
    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        # (version, lines) samples: empty, trailing newline, no trailing
        # newline.
        samples = [
            ('a', []),
            ('b', ["foo\n", "bar\n"]),
            ('c', ["foo\n", "bar"]),
            ]
        shas = []
        for version, lines in samples:
            key = self.get_simple_key(version)
            if add_lines:
                sha, _, _ = vf.add_lines(key, [], lines)
            else:
                sha, _, _ = vf._add_text(key, [], ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(shas, samples):
            new_key = self.get_simple_key(version + "2")
            # Re-adding identical content under a new key must raise when
            # nostore_sha matches, via both insertion APIs.
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = next(vf.get_record_stream([new_key], 'unordered', True))
            self.assertEqual('absent', record.storage_kind)
1642
4398.8.4 by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles
1643
    def test_add_lines_nostoresha(self):
        """nostore_sha behaviour via the add_lines API."""
        self._add_content_nostoresha(add_lines=True)
1645
4398.8.6 by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'.
1646
    def test__add_text_nostoresha(self):
        """nostore_sha behaviour via the _add_text API."""
        self._add_content_nostoresha(add_lines=False)
1648
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1649
    def test_add_lines_return(self):
        """add_lines returns (sha1, length, opaque) for each text."""
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        # (sha1, length) for origin, base, left, right, merged in order.
        expected = [
            ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
            ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
            ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
            ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
            ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
            ]
        if self.key_length == 1:
            self.assertEqual(expected, results)
        elif self.key_length == 2:
            # Each text is inserted once per prefix, so every entry appears
            # twice in a row.
            self.assertEqual(
                [entry for entry in expected for _ in (0, 1)], results)
1679
4241.4.1 by Ian Clatworthy
add sha generation support to versionedfiles
1680
    def test_add_lines_no_key_generates_chk_key(self):
        """Inserting without keys assigns content-hash (CHK) keys."""
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        # (sha1, length) for origin, base, left, right, merged in order.
        expected = [
            ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
            ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
            ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
            ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
            ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
            ]
        # The CHK key suffixes derived from those sha1s.
        chk_shas = [
            'sha1:00e364d235126be43292ab09cb4686cf703ddc17',
            'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',
            'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',
            ]
        if self.key_length == 1:
            self.assertEqual(expected, results)
            # Check the added items got CHK keys.
            self.assertEqual({(sha,) for sha in chk_shas}, files.keys())
        elif self.key_length == 2:
            # Each text is inserted once per prefix, so every entry appears
            # twice in a row.
            self.assertEqual(
                [entry for entry in expected for _ in (0, 1)], results)
            # Check the added items got CHK keys.
            self.assertEqual(
                {(file_id, sha)
                    for file_id in ('FileA', 'FileB')
                    for sha in chk_shas},
                files.keys())
1733
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1734
    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        # Use the builtin next() rather than the Python 2-only .next()
        # generator method, matching the 2to3 conversion already applied to
        # the rest of this class.
        record = next(f.get_record_stream([key_a], 'unordered', True))
        self.assertEqual('', record.get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        record = next(f.get_record_stream([key_b], 'unordered', True))
        self.assertEqual('', record.get_bytes_as('fulltext'))
1747
1748
    def test_newline_only(self):
        """A file containing only a newline can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        # Use the builtin next() rather than the Python 2-only .next()
        # generator method, matching the 2to3 conversion already applied to
        # the rest of this class.
        record = next(f.get_record_stream([key_a], 'unordered', True))
        self.assertEqual('\n', record.get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        record = next(f.get_record_stream([key_b], 'unordered', True))
        self.assertEqual('\n', record.get_bytes_as('fulltext'))
1760
4593.5.35 by John Arbash Meinel
Start working on a per-vf implementation test of find_ancestry.
1761
    def test_get_known_graph_ancestry(self):
        """get_known_graph_ancestry returns a KnownGraph of the ancestry."""
        files = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A
        # |\
        # | B
        # |/
        # C
        files.add_lines(key_a, [], ['\n'])
        files.add_lines(key_b, [key_a], ['\n'])
        files.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = files.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1779
4634.11.2 by John Arbash Meinel
Teach VF.get_known_graph_ancestry to go to fallbacks (bug #419241)
1780
    def test_known_graph_with_fallbacks(self):
        """get_known_graph_ancestry reaches into fallback stores."""
        files = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(files, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (files.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A     only in fallback
        # |\
        # | B
        # |/
        # C
        fallback = self.get_versionedfiles('fallback')
        fallback.add_lines(key_a, [], ['\n'])
        files.add_fallback_versioned_files(fallback)
        files.add_lines(key_b, [key_a], ['\n'])
        files.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = files.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1802
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1803
    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        files = self.get_versionedfiles()
        stream = files.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(stream))
1808
1809
    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        # The full set of storage kinds any record stream may emit.
        known_kinds = [
            'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
            'knit-ft', 'knit-delta', 'chunked', 'fulltext',
            'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
            'knit-delta-gz',
            'knit-delta-closure', 'knit-delta-closure-ref',
            'groupcompress-block', 'groupcompress-block-ref',
            ]
        self.assertSubset([storage_kind], known_kinds)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1818
4332.3.26 by Robert Collins
Allow passing keys to check to VersionedFile.check().
1819
    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing.

        Calls on_seen for every key, and asserts each record factory is
        well formed: valid storage kind, correct sha1 and parents, and
        byte-string content (optionally extractable as a fulltext).
        """
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            # sha1 may legitimately be None; only verify it when present.
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1833
1834
    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        stream = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, stream, seen.add, parent_map)
        self.assertEqual(set(keys), seen)
1844
1845
    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        # Topological depth of each diamond node.
        depths = {'merged': 2, 'left': 1, 'right': 1, 'base': 0}
        names = ['merged', 'left', 'right', 'base']
        if self.key_length == 1:
            keys = [(name,) for name in names]
            sort_order = dict(((name,), depths[name]) for name in names)
        else:
            keys = [(prefix, name)
                    for prefix in ('FileA', 'FileB') for name in names]
            sort_order = dict(
                ((prefix, name), depths[name]) for prefix, name in keys)
        return keys, sort_order
1864
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1865
    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        # groupcompress order is newest-first within a prefix; each prefix
        # forms its own contiguous group of positions.
        positions = {'merged': 0, 'left': 1, 'right': 1, 'base': 2}
        names = ('merged', 'left', 'right', 'base')
        if self.key_length == 1:
            keys = [(name,) for name in names]
            sort_order = dict(((name,), positions[name]) for name in names)
        else:
            keys = []
            sort_order = {}
            for group, prefix in enumerate(('FileA', 'FileB')):
                for name in names:
                    key = (prefix, name)
                    keys.append(key)
                    # Offset the second prefix's positions past the first's.
                    sort_order[key] = positions[name] + 3 * group
        return keys, sort_order
1884
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1885
    def test_get_record_stream_interface_ordered(self):
        """A topological stream provides the interface in sorted order."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parents = files.get_parent_map(keys)
        stream = files.get_record_stream(keys, 'topological', False)
        observed = []
        # Collect records in arrival order so ordering can be verified.
        self.capture_stream(files, stream, observed.append, parents)
        self.assertStreamOrder(sort_order, observed, keys)
1895
1896
    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # include_delta_closure=True: every record must be reconstructable
        # into a full text from the stream alone.
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            # sha1 may legitimately be None (not computed); when present it
            # must match the store's value for this key.
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            # 'chunked' must be an iterable of strings that joins to the
            # same fulltext.
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)
1917
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1918
    def test_get_record_stream_interface_groupcompress(self):
        """A groupcompress-ordered stream provides the regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parents = files.get_parent_map(keys)
        stream = files.get_record_stream(keys, 'groupcompress', False)
        observed = []
        # Collect records in arrival order so ordering can be verified.
        self.capture_stream(files, stream, observed.append, parents)
        self.assertStreamOrder(sort_order, observed, keys)
1928
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1929
    def assertStreamOrder(self, sort_order, seen, keys):
        """Assert that `seen` is an acceptable ordering of `keys`.

        :param sort_order: A dict mapping key -> sort position; a key must
            not appear before a key with a lower position in the same prefix.
        :param seen: The keys in the order the stream yielded them.
        :param keys: The keys that were requested.
        """
        self.assertEqual(len(set(seen)), len(keys))
        # Track the minimum acceptable sort position per key prefix.
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            # Without a graph there are no ordering constraints; only
            # membership can be checked.
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
1943
1944
    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            # sha1 is optional; when supplied it must match the stored text.
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                                 factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            # After the failed request the factory must still be able to
            # yield its native representation.
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)
1975
1976
    def test_get_record_stream_missing_records_are_absent(self):
        """Requesting missing keys yields 'absent' records, not errors."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        # Both orderings must report absences the same way.
        for ordering in ('unordered', 'topological'):
            entries = files.get_record_stream(keys, ordering, False)
            self.assertAbsentRecord(files, keys, parent_map, entries)
1994
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1995
    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the bytes bytes."""
        # Both the fulltext and the joined chunked representations must
        # equal the expected content.
        fulltext = record.get_bytes_as('fulltext')
        self.assertEqual(bytes, fulltext)
        chunks = record.get_bytes_as('chunked')
        self.assertEqual(bytes, ''.join(chunks))
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1999
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
2000
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        """A single stored text's native record can cross the wire intact."""
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = next(stream)
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            # Round-trip the native bytes through NetworkRecordStream and
            # check that key metadata and content are preserved.
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            # Exactly one record comes back for the one sent.
            self.assertEqual(1, len(records))
2025
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
2026
    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        Note: this is a generator, so the assertions fire lazily as the
        caller consumes the returned records.

        :param records: A list to collect the seen records.
        :param expected: The reference record stream.
        :param stream: The record stream under test.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for easier
        # debugging. This must use the iterating zip() from the future.
        for record, ref_record in zip(stream, expected):
            records.append(record)
            # Key, storage kind and parents must agree record-for-record.
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record
2040
2041
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        Records already in 'chunked' or 'fulltext' form are not wire-ready:
        they are verified against full_texts and counted via skipped_records
        instead of being yielded.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
2059
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
2060
    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        """A fulltext and a delta against it survive wire (de)serialisation."""
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        # Only graph-capable stores record a parent for the delta text.
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Two identical streams: one to serialise, one as a reference.
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it all must have all been.
            self.assertIdenticalVersionedFile(files, target_files)
2092
2093
    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        """A lone delta record survives wire (de)serialisation."""
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        # Only graph-capable stores record a parent for the delta text.
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        # Two identical streams: one to serialise, one as a reference.
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it all must have all been
            self.assertIdenticalVersionedFile(files, target_files)
2129
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
2130
    def test_get_record_stream_wire_ready_delta_closure_included(self):
        """With delta closure, a wire record yields its full text directly."""
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        # Only graph-capable stores record a parent for the delta text.
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # include_delta_closure=True so the stream carries enough data to
        # reconstruct full texts on the far side.
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so we can
        # check the content was carried across correctly without doing delta
        # inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
2159
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2160
    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent.

        Keys whose last element is 'absent' must come back as 'absent'
        placeholder records with no sha1 or parents; all other keys must be
        full, valid records.
        """
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                # Missing keys yield placeholder records, not exceptions.
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                # sha1 is optional; when present it must match the store.
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        # Every requested key, present or absent, must have been yielded.
        self.assertEqual(set(keys), seen)
2178
2179
    def test_filter_absent_records(self):
        """Requested missing records can be filter trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        extra_key = ('extra',) if self.key_length == 1 else ('extra', 'extra')
        keys.insert(2, extra_key)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        # filter_absent must drop the placeholder and pass everything else.
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)
2198
2199
    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        # Single-element keys map to one constant file; longer keys need
        # per-prefix escaping.
        if self.key_length != 1:
            return HashEscapedPrefixMapper()
        return ConstantMapper('source')
2205
2206
    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        # Graphless stores discard parent information entirely.
        return parents if self.graph else None
2212
4332.3.32 by Robert Collins
Merge bzr.dev.
2213
    def test_get_annotator(self):
        """Annotation attributes each line to an originating key."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # annotator = files.get_annotator()
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        # a merge
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            # With a graph, each line is attributed to the key that first
            # introduced it.
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        # Annotating an absent key raises RevisionNotPresent.
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
2248
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2249
    def test_get_parent_map(self):
        """get_parent_map returns parents for present keys and skips absent.

        Fix: materialise ``all_parents.keys()`` as a list before calling
        ``insert`` — on Python 3 ``dict.keys()`` returns a view object with
        no ``insert`` method (this file already uses Python 3 idioms such as
        ``next(stream)`` elsewhere).
        """
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result. Use a list:
        # dict views (Python 3) do not support insert().
        keys = list(all_parents.keys())
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
2285
2286
    def test_get_sha1s(self):
        """get_sha1s returns the known sha1 for each diamond text."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        # Expected sha1s of the canonical diamond fixture contents.
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
2305
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2306
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream is a no-op and must not error."""
        store = self.get_versionedfiles()
        store.insert_record_stream([])
2310
2311
    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents.

        Fix: replace the Python-2-only ``iterator.next()`` calls with the
        builtin ``next()``, matching the ``next(stream)`` usage elsewhere in
        this file.
        """
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
        else:
            # Graphless stores report no parents at all.
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            # Compare the reconstructed fulltext of every key.
            actual_text = next(actual.get_record_stream(
                [key], 'unordered', True)).get_bytes_as('fulltext')
            expected_text = next(expected.get_record_stream(
                [key], 'unordered', True)).get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)
2326
2327
    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts.

        Fix: drop ``trailing_eol=False`` — with it this test was an exact
        duplicate of test_insert_record_stream_fulltexts_noeol; the other
        format pairs in this file (annotated/plain knits) use the default
        trailing-eol content for the non-noeol variant.
        """
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2341
2342
    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts without final EOLs."""
        files = self.get_versionedfiles()
        transport = self.get_transport('source')
        transport.mkdir('.')
        # weaves always output fulltexts.
        weave_factory = make_versioned_files_factory(WeaveFile,
            self.get_mapper())
        source = weave_factory(transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2356
2357
    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        transport = self.get_transport('source')
        transport.mkdir('.')
        # annotated=True: the source is an annotated knit store.
        source = make_file_factory(True, mapper)(transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2369
2370
    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from plain knits (no final EOL)."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        transport = self.get_transport('source')
        transport.mkdir('.')
        # annotated=True: the source is an annotated knit store.
        source = make_file_factory(True, mapper)(transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2382
2383
    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        transport = self.get_transport('source')
        transport.mkdir('.')
        # annotated=False: the source is a plain (unannotated) knit store.
        source = make_file_factory(False, mapper)(transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2395
2396
    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept plain knit records without trailing eol."""
        target = self.get_versionedfiles()
        mapper = self.get_mapper()
        transport = self.get_transport('source')
        transport.mkdir('.')
        # make_file_factory(False, ...) builds plain (unannotated) knits.
        source = make_file_factory(False, mapper)(transport)
        self.get_diamond_files(source, trailing_eol=False)
        records = source.get_record_stream(
            source.keys(), 'topological', False)
        target.insert_record_stream(records)
        self.assertIdenticalVersionedFile(source, target)
    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        target = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # Pre-populate the target with a subset of the source's texts.
        self.get_diamond_files(target, left_only=True)
        records = source.get_record_stream(
            source.keys(), 'topological', False)
        target.insert_record_stream(records)
        self.assertIdenticalVersionedFile(source, target)
    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        target = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        absent_key = ('missing',) * self.key_length
        records = source.get_record_stream(
            [absent_key], 'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
                          target.insert_record_stream, records)
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        target = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # Build the key groups; two-element keys carry a per-file prefix.
        if self.key_length == 1:
            prefixes = [()]
        else:
            prefixes = [('FileA',), ('FileB',)]
        origin_keys = [prefix + ('origin',) for prefix in prefixes]
        end_keys = [prefix + (name,) for prefix in prefixes
                    for name in ('merged', 'left')]
        start_keys = [prefix + (name,) for prefix in prefixes
                      for name in ('right', 'base')]
        origin_entries = source.get_record_stream(
            origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(
            end_keys, 'topological', False)
        start_entries = source.get_record_stream(
            start_keys, 'topological', False)
        # Deliver the graph tips before the records they depend on.
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        try:
            target.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            target.check()
        else:
            self.assertIdenticalVersionedFile(source, target)
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        # A large shared prefix keeps successive texts similar, encouraging
        # delta storage.
        content = ['same same %d\n' % n for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Stream the records newest-first, excluding the oldest record that
        # the rest ultimately depend upon, and insert into a new vf.
        streams = [source.get_record_stream([key], 'unordered', False)
                   for key in reversed(keys)]
        deltas = itertools.chain.from_iterable(streams[:-1])
        target = self.get_versionedfiles()
        try:
            target.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            target.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(target.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)
    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        transport = self.get_transport('source')
        transport.mkdir('.')
        # A plain knit with a full diamond graph gives us delta records.
        source = make_file_factory(False, self.get_mapper())(transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
                          nograph=False, left_only=False)
        return source
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        target = self.get_versionedfiles()
        if self.support_partial_insertion:
            # Nothing is missing before any insertion happens.
            self.assertEqual(
                [], list(target.get_missing_compression_parent_keys()))
            target.insert_record_stream(entries)
            missing_bases = target.get_missing_compression_parent_keys()
            # 'merged' is a delta whose basis, 'left', was not streamed.
            self.assertEqual({self.get_simple_key('left')},
                             set(missing_bases))
            self.assertEqual(set(keys), set(target.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent,
                target.insert_record_stream, entries)
            target.check()
    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        # 'merged' is a delta against 'left', which was not streamed.
        self.assertEqual({self.get_simple_key('left')},
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        # Wrap .keys() in list(): on Python 3 a dict view never compares
        # equal to a list, so the bare comparison would always fail.
        self.assertEqual(
            [merged_key], list(files.get_parent_map([merged_key]).keys()))
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], list(files.get_parent_map([merged_key]).keys()))
        files.check()
    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):
            # ProgressTask that records every update() call it receives.

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add a ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add a ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def count_lines_for(keys, expected):
            # Count how often each line is returned for keys, checking any
            # progress updates made along the way.
            counts = {}
            pb = InstrumentedProgress()
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=pb):
                counts[line] = counts.get(line, 0) + 1
            if pb.updates:
                self.assertEqual(expected, pb.updates)
            return counts

        lines = count_lines_for(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = count_lines_for(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
    def test_make_mpdiffs(self):
        """mpdiffs made by make_mpdiffs round-trip through add_mpdiffs."""
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceeding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # anestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        # Two parallel chains of 26 texts each, long enough to exceed a knit
        # delta chain limit.  (A historical table of per-depth sha1s that was
        # no longer referenced has been removed.)
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            # Applying the mpdiff must reproduce the original fulltext.
            # Use the next() builtin: the Python 2-only .next() method does
            # not exist on Python 3 iterators.
            self.assertEqualDiff(
                next(files.get_record_stream([key], 'unordered',
                    True)).get_bytes_as('fulltext'),
                next(target.get_record_stream([key], 'unordered',
                    True)).get_bytes_as('fulltext')
                )
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        key = ('foo',) if self.key_length == 1 else ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual({key}, set(files.keys()))
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        # Return the subset of self._parent_map covering the requested keys.
        return {k: self._parent_map[k]
                for k in keys if k in self._parent_map}

    def setUp(self):
        super(VirtualVersionedFilesTests, self).setUp()
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        # Virtual files are read-only: adding lines is unsupported.
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        # Virtual files are read-only: adding mpdiffs is unsupported.
        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        # Virtual files are read-only: inserting streams is unsupported.
        self.assertRaises(NotImplementedError,
                          self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        expected = {("key",): osutils.sha_strings(self._lines["key"])}
        self.assertEqual(expected, self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        # Parents come back as key tuples; absent keys are omitted.
        self._parent_map = {"G": ("A", "B")}
        self.assertEqual({("G",): (("A",), ("B",))},
                         self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        record = next(stream)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        record = next(stream)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        lines = self.texts.iter_lines_added_or_present_in_keys(
            [("A",), ("B",)])
        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                         sorted(lines))
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):
2808
2809
    def get_ordering_vf(self, key_priority):
        """Build a branch with the linear history A->B->C->D and wrap its
        inventories in an OrderingVersionedFilesDecorator.

        :param key_priority: mapping from key tuple to sort priority,
            passed straight through to the decorator.
        :return: the decorator wrapping the repository's inventories.
        """
        builder = self.make_branch_builder('test')
        builder.start_series()
        # Root revision, then a simple linear chain on top of it.
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        for revid, parent in [('B', 'A'), ('C', 'B'), ('D', 'C')]:
            builder.build_snapshot(revid, [parent], [])
        builder.finish_series()
        branch = builder.get_branch()
        # Hold a read lock for the rest of the test; release on teardown.
        branch.lock_read()
        self.addCleanup(branch.unlock)
        return versionedfile.OrderingVersionedFilesDecorator(
            branch.repository.inventories, key_priority)
2823
2824
    def test_get_empty(self):
        """A freshly created decorator has recorded no calls yet."""
        self.assertEqual([], self.get_ordering_vf({}).calls)
2827
2828
    def test_get_record_stream_topological(self):
        """'topological' ordering wins over the priority map."""
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'topological', False)
        # Parents before children, regardless of the configured priorities.
        self.assertEqual([('A',), ('B',), ('C',), ('D',)],
                         [record.key for record in stream])
        # The underlying request must have been recorded verbatim.
        self.assertEqual(
            [('get_record_stream', request_keys, 'topological', False)],
            vf.calls)
2838
2839
    def test_get_record_stream_ordered(self):
        """With 'unordered', records come back sorted by priority value."""
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        # Lowest priority number first: D(1), B(2), A(3), C(4).
        self.assertEqual([('D',), ('B',), ('A',), ('C',)],
                         [record.key for record in stream])
        # The underlying request must have been recorded verbatim.
        self.assertEqual(
            [('get_record_stream', request_keys, 'unordered', False)],
            vf.calls)
2849
2850
    def test_get_record_stream_implicit_order(self):
        """Keys missing from the priority map sort ahead of prioritized
        keys, ordered alphabetically among themselves."""
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        # A and C are unmapped so they come first; A sorts before C
        # alphabetically.  D(1) then precedes B(2) by priority.
        self.assertEqual([('A',), ('C',), ('D',), ('B',)],
                         [record.key for record in stream])
        # The underlying request must have been recorded verbatim.
        self.assertEqual(
            [('get_record_stream', request_keys, 'unordered', False)],
            vf.calls)