/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
2052.3.2 by John Arbash Meinel
Change Copyright .. by Canonical to Copyright ... Canonical
1
# Copyright (C) 2005 Canonical Ltd
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
2
#
3
# Authors:
4
#   Johan Rydberg <jrydberg@gnu.org>
5
#
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
10
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
15
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20
1704.2.15 by Martin Pool
Remove TODO about knit testing printed from test suite
21
# TODO: might be nice to create a versionedfile with some type of corruption
22
# considered typical and check that it can be detected/corrected.
23
3350.3.16 by Robert Collins
Add test that out of order insertion fails with a clean error/does not fail.
24
from itertools import chain
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
25
from StringIO import StringIO
26
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
27
import bzrlib
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
28
from bzrlib import (
29
    errors,
2309.4.7 by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode,
30
    osutils,
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
31
    progress,
32
    )
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
33
from bzrlib.errors import (
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
34
                           RevisionNotPresent,
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
35
                           RevisionAlreadyPresent,
36
                           WeaveParentMismatch
37
                           )
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
38
from bzrlib import knit as _mod_knit
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
39
from bzrlib.knit import (
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
40
    cleanup_pack_knit,
41
    make_file_factory,
42
    make_pack_factory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
43
    KnitAnnotateFactory,
2770.1.10 by Aaron Bentley
Merge bzr.dev
44
    KnitPlainFactory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
45
    )
3350.3.14 by Robert Collins
Deprecate VersionedFile.join.
46
from bzrlib.symbol_versioning import one_four, one_five
3350.6.2 by Robert Collins
Prepare parameterised test environment.
47
from bzrlib.tests import (
48
    TestCaseWithMemoryTransport,
49
    TestScenarioApplier,
50
    TestSkipped,
51
    condition_isinstance,
52
    split_suite_by_condition,
53
    iter_suite_tests,
54
    )
3102.1.1 by Vincent Ladeuil
Rename bzrlib/test/HTTPTestUtils.py to bzrlib/tests/http_utils.py and fix
55
from bzrlib.tests.http_utils import TestCaseWithWebserver
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
56
from bzrlib.trace import mutter
1563.2.16 by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable.
57
from bzrlib.transport import get_transport
1563.2.13 by Robert Collins
InterVersionedFile implemented.
58
from bzrlib.transport.memory import MemoryTransport
1684.3.1 by Robert Collins
Fix versioned file joins with empty targets.
59
from bzrlib.tsort import topo_sort
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
60
from bzrlib.tuned_gzip import GzipFile
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
61
import bzrlib.versionedfile as versionedfile
3350.6.2 by Robert Collins
Prepare parameterised test environment.
62
from bzrlib.versionedfile import (
63
    ConstantMapper,
64
    HashEscapedPrefixMapper,
65
    PrefixMapper,
66
    make_versioned_files_factory,
67
    )
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
68
from bzrlib.weave import WeaveFile
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
69
from bzrlib.weavefile import read_weave, write_weave
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
70
71
3350.6.2 by Robert Collins
Prepare parameterised test environment.
72
def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations.

    Tests that are instances of TestVersionedFiles are multiplied across
    the scenario lists below; all other tests run once, unmodified.

    :param standard_tests: The test suite discovered by the loader.
    :param module: The module the tests came from (unused here, part of the
        load_tests protocol).
    :param loader: The test loader (unused here, part of the protocol).
    :return: A suite with TestVersionedFiles tests adapted per scenario.
    """
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    len_one_adapter = TestScenarioApplier()
    len_two_adapter = TestScenarioApplier()
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_adapter.scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            }),
        ('named-nograph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            }),
        ]
    len_two_adapter.scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            }),
        ]
    # Each selected test is cloned once per scenario in both adapters.
    for test in iter_suite_tests(to_adapt):
        result.addTests(len_one_adapter.adapt(test))
        result.addTests(len_two_adapter.adapt(test))
    return result
146
147
3350.3.11 by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error.
148
def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    Inserts the five-node graph origin -> base -> (left, right) -> merged
    into the versioned file f via add_lines.

    :param f: A VersionedFile to populate.
    :param trailing_eol: If True end the last line with \n.
    :param left_only: If True do not add the right and merged nodes.
    :return: A tuple (f, parents) where parents maps each version id to its
        expected parent-key tuples.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
174
175
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
176
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :return: The results of the add_lines calls.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        # Graphless stores receive no parents at all.
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('origin',), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('base',),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('left',),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('right',),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('merged',),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
230
231
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
232
class VersionedFileTestMixIn(object):
233
    """A mixin test class for testing VersionedFiles.
234
235
    This is not an adaptor-style test at this point because
236
    theres no dynamic substitution of versioned file implementations,
237
    they are strictly controlled by their owning repositories.
238
    """
239
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
240
    def get_transaction(self):
        """Return the transaction for this test, lazily defaulting to None.

        The attribute is created on first access so tests that never touch
        transactions need no setup.
        """
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction
244
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
245
    def test_add(self):
        """Basic add_lines: texts, lengths, and error cases round-trip."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            # All recorded state must be readable back from the file.
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())
            # missing parents and duplicate versions are rejected.
            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
267
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
268
    def test_adds_with_parent_texts(self):
        """add_lines accepts and populates a parent_texts memo dict."""
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        # the memo must have been filled in for both adds.
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def verify_file(f):
            # Texts and per-line annotation origins must be correct.
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate('r1')
            self.assertEquals(origins[0][0], 'r0')
            self.assertEquals(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEquals(origins[0][0], 'r1')
            self.assertEquals(origins[1][0], 'r2')

        verify_file(f)
        f = self.reopen_file()
        verify_file(f)
301
2805.6.7 by Robert Collins
Review feedback.
302
    def test_add_unicode_content(self):
        """Unicode line content must be rejected by both add APIs."""
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        # add_lines_with_ghosts is optional, so NotImplementedError is also
        # an acceptable outcome here.
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
311
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
312
    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        # whichever matching blocks are supplied, the reconstructed text
        # must be identical.
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
328
2805.6.7 by Robert Collins
Review feedback.
329
    def test_inline_newline_throws(self):
        """Lines containing an embedded newline are rejected."""
        # newline characters within a line are not permitted (the original
        # comment said \r, but the rejected input is an embedded '\n').
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            # add_lines_with_ghosts is optional for implementations.
            pass
343
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
344
    def test_add_reserved(self):
        """Adding a version with a reserved id (trailing ':') raises."""
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
348
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
349
    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
368
2803.1.1 by Robert Collins
Fix typo in ghosts version of test_add_lines_nostoresha.
369
    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
393
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
394
    def test_add_lines_return_value(self):
        """add_lines returns (sha1, text size, ...) regardless of parents."""
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
413
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
414
    def test_get_reserved(self):
        """Reading a reserved id raises ReservedId from every accessor."""
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
419
3468.2.4 by Martin Pool
Test and fix #234748 problems in trailing newline diffs
420
    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
444
445
    def test_get_texts_eol_variation(self):
        """Alternating eol/no-eol versions must extract in any order."""
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
473
3460.2.1 by Robert Collins
* Inserting a bundle which changes the contents of a file with no trailing
474
    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Create a delta which adds a new line before the trailing line, and
        # reuses the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
495
2520.4.85 by Aaron Bentley
Get all test passing (which just proves there aren't enough tests!)
496
    def test_make_mpdiffs(self):
        """Texts round-trip through make_mpdiffs/add_mpdiffs unchanged."""
        from bzrlib import multiparent
        source_vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(source_vf)
        target_vf = self.get_file('bar')
        for version in multiparent.topo_iter(source_vf):
            mpdiff = source_vf.make_mpdiffs([version])[0]
            parents = source_vf.get_parent_map([version])[version]
            sha1 = source_vf.get_sha1s([version])[version]
            target_vf.add_mpdiffs([(version, parents, sha1, mpdiff)])
            self.assertEqualDiff(source_vf.get_text(version),
                                 target_vf.get_text(version))
507
3453.3.2 by John Arbash Meinel
Add a test case for the first loop, unable to find a way to trigger the second loop
508
    def test_make_mpdiffs_with_ghosts(self):
        """Asking for a mpdiff of a ghost raises RevisionNotPresent."""
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
516
1596.2.38 by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready.
517
    def _setup_for_deltas(self, f):
        """Populate *f* with texts that exercise delta-chain behaviour.

        Adds texts that trip the knit maximum delta chain threshold, two
        parallel 26-deep insertion chains, and a set of detailed eol cases.
        Returns a dict mapping chain depth -> expected sha1.
        """
        self.assertFalse(f.has_version('base'))
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        # two parallel chains of 25 insertions each; the chains are identical
        # in content, only their version names differ.
        for text_name in ('chain1-', 'chain2-'):
            next_parent = 'base'
            text = ['line\n']
            for depth in range(26):
                new_version = text_name + '%s' % depth
                text = text + ['line\n']
                f.add_lines(new_version, [next_parent], text)
                next_parent = new_version
        return sha1s
1596.2.37 by Robert Collins
Switch to delta based content copying in the generic versioned file copier.
586
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
587
    def test_ancestry(self):
        """get_ancestry returns a topologically valid ancestry list."""
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # several orderings are legal topological sorts, e.g.:
        #   r0 r1 r2 rM r3
        #   r0 r1 r2 r3 rM
        # so assert on relative positions rather than the exact list.
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        # r3 is not an ancestor of rM and must be absent.
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        # unsorted ancestry covers the same versions as the sorted form.
        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
617
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
618
    def test_mutate_after_finish(self):
        """Writes after the transaction scope ends raise OutSideTransaction."""
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
1563.2.7 by Robert Collins
add versioned file clear_cache entry.
624
        
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
625
    def test_copy_to(self):
        """copy_to writes every suffix-file of the store to the transport."""
        source = self.get_file()
        source.add_lines('0', [], ['a\n'])
        transport = MemoryTransport()
        source.copy_to('foo', transport)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(transport.has('foo' + suffix))
632
633
    def test_get_suffixes(self):
        """get_suffixes returns a list."""
        f = self.get_file()
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
637
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
638
    def test_get_parent_map(self):
        """get_parent_map maps present versions and omits absent ones."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        # an absent version yields an empty map ...
        self.assertEqual({}, f.get_parent_map('y'))
        # ... and is silently dropped from a mixed query.
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))
660
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
661
    def test_annotate(self):
        """annotate attributes each line to the version that introduced it.

        Also checks that annotating an absent version raises
        RevisionNotPresent.
        """
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        # 'c\n' is new in r1; 'b\n' is inherited from r0.
        # Use assertEqual rather than the deprecated assertEquals alias,
        # for consistency with the rest of this file.
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')
671
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
672
    def test_detection(self):
        """Weaves detect corruption.

        Weaves contain a checksum of their texts; extracting a text must
        verify that checksum.
        """
        # text corrupted but metadata intact:
        w = self.get_file_corrupted_text()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        # text intact but recorded checksum wrong:
        w = self.get_file_corrupted_checksum()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
692
693
    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)
696
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
697
    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)
700
1594.2.6 by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved.
701
    def test_iter_lines_added_or_present_in_versions(self):
        """iter_lines... yields at least every line added by the versions.

        The graph built here is a tree so that dumb searches have more
        chances to muck up; progress callbacks are also verified.
        """

        class InstrumentedProgress(progress.DummyProgress):
            # Records every update() call so the test can assert on the
            # progress reporting behaviour.

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            # NOTE: renamed from 'progress' — the old name shadowed the
            # 'progress' module used for InstrumentedProgress's base class.
            pb = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=pb):
                lines.setdefault(line, 0)
                lines[line] += 1
            if pb.updates != []:
                self.assertEqual(expected, pb.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content.', 0, 2),
                                    ('Walking content.', 1, 2),
                                    ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content.', 0, 5),
                                          ('Walking content.', 1, 5),
                                          ('Walking content.', 2, 5),
                                          ('Walking content.', 3, 5),
                                          ('Walking content.', 4, 5),
                                          ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
1594.2.7 by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc.
764
1594.2.8 by Robert Collins
add ghost aware apis to knits.
765
    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
1594.2.8 by Robert Collins
add ghost aware apis to knits.
802
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
803
    def test_add_lines_with_ghosts_after_normal_revs(self):
        """Ghost parents may follow normally-added revisions.

        Formats without ghost support raise NotImplementedError from
        add_lines_with_ghosts and are skipped.
        """
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        # Use assertEqual rather than the deprecated assertEquals alias,
        # for consistency with the rest of this file.
        self.assertEqual(('base', 'line\n'), origins[0])
        self.assertEqual(('base', 'line_b\n'), origins[1])
        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
1594.2.23 by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files.
821
822
    def test_readonly_mode(self):
823
        transport = get_transport(self.get_url('.'))
824
        factory = self.get_factory()
825
        vf = factory('id', transport, 0777, create=True, access_mode='w')
826
        vf = factory('id', transport, access_mode='r')
827
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
828
        self.assertRaises(errors.ReadOnlyError,
829
                          vf.add_lines_with_ghosts,
830
                          'base',
831
                          [],
832
                          [])
1666.1.6 by Robert Collins
Make knit the default format.
833
    
3316.2.9 by Robert Collins
* ``VersionedFile.get_sha1`` is deprecated, please use
834
    def test_get_sha1s(self):
        """get_sha1s exposes the text checksums."""
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        expected = {
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            }
        self.assertEqual(expected, vf.get_sha1s(['a', 'c', 'b']))
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
849
        
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
850
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
851
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
    """Run the VersionedFile interface tests against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        """Return a WeaveFile whose stored text has been tampered with."""
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text; first make sure the
        # internals of weave are as expected.
        self.assertEqual([('{', 0),
                          'hello\n',
                          ('}', None),
                          ('{', 1),
                          'there\n',
                          ('}', None),
                          ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f',
                          '90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], w._sha1s)
        w.check()

        # Corrupt the v2 text line in place.
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        """Return a WeaveFile with a valid text but a bad recorded sha1."""
        w = self.get_file_corrupted_text()
        # Restore the text...
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # ...then invalidate the checksum: first digit changed.
        w._sha1s[1] =  'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        """Constructing a WeaveFile without create=True must not create it."""
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
905
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
906
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
907
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
    """Tests for _PlanMergeVersionedFile with two fallback stores."""

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        # Two independent backing stores; the plan-merge vf consults both.
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        # Only keys whose id ends with ':' are accepted, and parents/lines
        # must be real sequences, not None.
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        """Build A->B in vf1, C->D in vf2, and E: merging B and D locally."""
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        # Parents resolve from either fallback or the local store.
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        # Unknown keys are simply absent from the result, not an error.
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'),('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            # Fetch a single record for ('root', suffix).
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        # Missing keys yield an 'absent' record rather than raising.
        self.assertEqual('absent', get_record('F').storage_kind)
1666.1.1 by Robert Collins
Add trivial http-using test for versioned files.
962
963
964
class TestReadonlyHttpMixin(object):
    """Mixin: versioned files must be readable over readonly http.

    Users must provide get_file() and get_factory().
    """

    def get_transaction(self):
        # Any stable token will do as a transaction scope here.
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        # NOTE(review): this asserts on the writable vf; presumably it was
        # meant to check readonly_vf.versions() — the loop below does read
        # every version through the readonly side, so coverage is retained.
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
982
983
984
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-http mixin scenarios against WeaveFile."""

    def get_file(self):
        """Create a fresh weave named 'foo' on the test transport."""
        transport = get_transport(self.get_url('.'))
        return WeaveFile('foo', transport, create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        """The versioned file class exercised by the mixin."""
        return WeaveFile
992
993
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
994
class MergeCasesMixin(object):
    """Common weave-merge scenarios, run against each file implementation.

    Users of this mixin must provide get_file() and log_contents().
    """

    def doMerge(self, base, a, b, mp):
        """Merge texts a and b (both derived from base) and check the result.

        :param base: lines of the common ancestor, without newlines.
        :param a: lines of the first derived text, without newlines.
        :param b: lines of the second derived text, without newlines.
        :param mp: the expected merged lines, without newlines.
        """
        from cStringIO import StringIO

        def addcrlf(x):
            # Fixture lines are given bare; terminate them here.
            return x + '\n'
        
        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    # Subclasses may override this to expect a conflict instead.
    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return
        
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        """Like doMerge, but fixtures are whole strings and the result is
        compared with assertEqualDiff for readable failures."""
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than 
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.
        
        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on 
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
1229
1230
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
1231
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the MergeCasesMixin scenarios against WeaveFile."""

    def get_file(self, name='foo'):
        """Create a new weave on the test transport."""
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        # Serialize the whole weave into the test log for debugging.
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # Weave merge flags the overlapped insert as a conflict, unlike the
    # mixin default which expects a clean merge.
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======', 
                                'xxx', '>>>>>>> ', 'bbb']
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1244
1245
1246
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
    """Tests for the record adapters that convert between storage kinds."""

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            # Adapter factories take a (possibly None) backing versioned file.
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        """Return a knit-backed VersionedFiles stored in a single file."""
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests.

        :param f: a VersionedFiles holding the diamond fixture.
        :param ft_adapter: adapter applied to the 'origin' fulltext record.
        :param delta_adapter: adapter applied to the 'merged' delta record.
        :return: (adapted fulltext bytes, adapted delta bytes).
        """
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base, base.get_bytes_as(base.storage_kind))
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged,
            merged.get_bytes_as(merged.storage_kind))
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits (no eol)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        # The adapted records are gzipped plain knit payloads; decompress
        # and compare against the exact expected serialization.
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        # Only the delta's compression base ('left') should be fetched.
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.
        
        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.
        
        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1393
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
1394
1395
class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        # A ConstantMapper ignores the key and always returns its name.
        m = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", m.map(('foo@ar',)))
        self.assertEqual("inventory", m.map(('quux',)))

    def test_prefix_mapper(self):
        # format5: plain prefix, round-trips through unmap.
        m = versionedfile.PrefixMapper()
        self.assertEqual("file-id", m.map(("file-id", "revision-id")))
        self.assertEqual("new-id", m.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), m.unmap("file-id"))
        self.assertEqual(('new-id',), m.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: two-character hash bucket plus the plain prefix.
        m = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", m.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", m.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), m.unmap("9b/file-id"))
        self.assertEqual(('new-id',), m.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash bucket plus an escaped prefix; escaping round-trips.
        m = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", m.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", m.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", m.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), m.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), m.unmap("88/ne%2557-%2549d"))
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1429
1430
1431
class TestVersionedFiles(TestCaseWithMemoryTransport):
1432
    """Tests for the multiple-file variant of VersionedFile."""
1433
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1434
    def get_versionedfiles(self, relpath='files'):
        """Create the VersionedFiles implementation under test.

        :param relpath: Directory (relative to the test transport) to host
            the store; '.' reuses the existing test directory.
        :return: A VersionedFiles instance built by the parameterised
            self.factory, with self.cleanup registered if one is supplied.
        """
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files
    def test_annotate(self):
        """annotate() attributes each line to the key that introduced it."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))
    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()
    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        """Insert the standard diamond ancestry into files.

        Delegates to the module-level get_diamond_files helper, adapting it
        to this test's key length and graph support.

        :param trailing_eol: If False, texts are stored without a final
            newline.
        :param left_only: If True, only insert the left side of the diamond.
        :return: The list of add_lines results from the helper.
        """
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only)
    def test_add_lines_return(self):
        """add_lines returns (sha1, text_length, ...) for each insertion."""
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            # Two-element keys insert the diamond once per file prefix, so
            # every (sha1, length) pair appears twice.
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))
    def test_newline_only(self):
        """A text consisting of a single newline round-trips."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))
    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))
    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
             'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])
    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing.

        For every record factory in entries: report its key via on_seen,
        then check its storage kind, sha1, parents and that its bytes are
        retrievable in its native storage kind.

        :param f: The VersionedFiles the stream came from (for sha1 lookup).
        :param entries: An iterable of record factories.
        :param on_seen: Callable invoked with each factory's key.
        :param parents: Mapping of key to expected parents.
        """
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)
    def get_simple_key(self, suffix):
        """Return a key for the object under test.

        For key length 1 the key is just (suffix,); otherwise the suffix is
        namespaced under the 'FileA' prefix.
        """
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)
    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering.

        :return: (keys, sort_order) where sort_order maps each key to its
            topological depth (base=0, left/right=1, merged=2).
        """
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order
    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)
    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            # With include_delta_closure the sha1 may be omitted (None).
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            self.assertIsInstance(factory.get_bytes_as('fulltext'), str)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertStreamOrder(sort_order, seen, keys)
    def assertStreamOrder(self, sort_order, seen, keys):
        """Assert that seen is a topologically valid ordering of keys.

        :param sort_order: Mapping of key to its minimum topological depth.
        :param seen: Keys in the order the stream yielded them.
        :param keys: The full set of keys requested.
        """
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            # Within each prefix, depth must never decrease.
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)
    def test_get_record_stream_missing_records_are_absent(self):
        """Requested-but-missing keys are yielded as 'absent' records."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        # Check both orderings handle absent records the same way.
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                # Missing keys come back as placeholder records.
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)
    def test_filter_absent_records(self):
        """Requested missing records can be filter trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
            parent_map)
        self.assertEqual(set(present_keys), seen)
    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()
    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration.

        Graph-less implementations ignore ancestry, so None is returned.
        """
        if self.graph:
            return parents
        else:
            return None
    def test_get_parent_map(self):
        """get_parent_map returns parents for present keys, skips absent."""
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
    def test_get_sha1s(self):
        """get_sha1s maps each requested key to its text's sha1."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])
    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
        else:
            # Graph-less stores discard ancestry on insert.
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)
    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        # Use default trailing-eol texts here; the _noeol variant of this
        # test covers trailing_eol=False (previously both passed
        # trailing_eol=False, making the two tests identical).
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into f.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        # Deliberately feed children before their parents.
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included aborts safely."""
        # We use a knit always here to be sure we are getting a binary delta.
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
        files = self.get_versionedfiles()
        self.assertRaises(RevisionNotPresent, files.insert_record_stream,
            entries)
        files.check()
        self.assertEqual({}, files.get_parent_map([]))
    def test_iter_lines_added_or_present_in_keys(self):
1983
        # test that we get at least an equalset of the lines added by
1984
        # versions in the store.
1985
        # the ordering here is to make a tree so that dumb searches have
1986
        # more changes to muck up.
1987
1988
        class InstrumentedProgress(progress.DummyProgress):
1989
1990
            def __init__(self):
1991
1992
                progress.DummyProgress.__init__(self)
1993
                self.updates = []
1994
1995
            def update(self, msg=None, current=None, total=None):
1996
                self.updates.append((msg, current, total))
1997
1998
        files = self.get_versionedfiles()
1999
        # add a base to get included
2000
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
2001
        # add a ancestor to be included on one side
2002
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
2003
        # add a ancestor to be included on the other side
2004
        files.add_lines(self.get_simple_key('rancestor'),
2005
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
2006
        # add a child of rancestor with no eofile-nl
2007
        files.add_lines(self.get_simple_key('child'),
2008
            self.get_parents([self.get_simple_key('rancestor')]),
2009
            ['base\n', 'child\n'])
2010
        # add a child of lancestor and base to join the two roots
2011
        files.add_lines(self.get_simple_key('otherchild'),
2012
            self.get_parents([self.get_simple_key('lancestor'),
2013
                self.get_simple_key('base')]),
2014
            ['base\n', 'lancestor\n', 'otherchild\n'])
2015
        def iter_with_keys(keys, expected):
2016
            # now we need to see what lines are returned, and how often.
2017
            lines = {}
2018
            progress = InstrumentedProgress()
2019
            # iterate over the lines
2020
            for line in files.iter_lines_added_or_present_in_keys(keys,
2021
                pb=progress):
2022
                lines.setdefault(line, 0)
2023
                lines[line] += 1
2024
            if []!= progress.updates:
2025
                self.assertEqual(expected, progress.updates)
2026
            return lines
2027
        lines = iter_with_keys(
2028
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
2029
            [('Walking content.', 0, 2),
2030
             ('Walking content.', 1, 2),
2031
             ('Walking content.', 2, 2)])
2032
        # we must see child and otherchild
2033
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
2034
        self.assertTrue(
2035
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
2036
        # we dont care if we got more than that.
2037
        
2038
        # test all lines
2039
        lines = iter_with_keys(files.keys(),
2040
            [('Walking content.', 0, 5),
2041
             ('Walking content.', 1, 5),
2042
             ('Walking content.', 2, 5),
2043
             ('Walking content.', 3, 5),
2044
             ('Walking content.', 4, 5),
2045
             ('Walking content.', 5, 5)])
2046
        # all lines must be seen at least once
2047
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
2048
        self.assertTrue(
2049
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
2050
        self.assertTrue(
2051
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
2052
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
2053
        self.assertTrue(
2054
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
    def test_make_mpdiffs(self):
        """Mpdiffs extracted from one store roundtrip into another.

        A graph of texts covering the interesting eol/no-eol and delta-chain
        cases is built in 'source'; each text is turned into a multi-parent
        diff, applied to 'target' via add_mpdiffs, and the reconstructed
        fulltext must match the original byte for byte.
        """
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        # Two parallel linear chains, each 26 texts deep and rooted at
        # 'base', to exceed any delta-chain limit.  (The duplicated loop
        # bodies and the unused sha1s expectation dict of the earlier
        # version have been folded away.)
        for text_name in ('chain1-', 'chain2-'):
            next_parent = self.get_simple_key('base')
            text = ['line\n']
            for depth in range(26):
                new_version = self.get_simple_key(text_name + '%s' % depth)
                text = text + ['line\n']
                files.add_lines(new_version,
                    self.get_parents([next_parent]), text)
                next_parent = new_version
        target = self.get_versionedfiles('target')
        # Roundtrip every key in topological order (parents before children,
        # as add_mpdiffs requires) and compare the reconstructed fulltexts.
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        # A freshly created store exposes no keys.
        self.assertEqual(set(), set(files.keys()))
        # Key width depends on the store under test: single-element keys for
        # plain stores, ('fileid', 'revision')-shaped keys otherwise.
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        # Adding one (empty) text makes exactly that key visible via keys().
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))