/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
2052.3.2 by John Arbash Meinel
Change Copyright .. by Canonical to Copyright ... Canonical
1
# Copyright (C) 2005 Canonical Ltd
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
2
#
3
# Authors:
4
#   Johan Rydberg <jrydberg@gnu.org>
5
#
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
10
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
15
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20
1704.2.15 by Martin Pool
Remove TODO about knit testing printed from test suite
21
# TODO: might be nice to create a versionedfile with some type of corruption
22
# considered typical and check that it can be detected/corrected.
23
3350.3.16 by Robert Collins
Add test that out of order insertion fails with a clean error/does not fail.
24
from itertools import chain
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
25
from StringIO import StringIO
26
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
27
import bzrlib
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
28
from bzrlib import (
29
    errors,
2309.4.7 by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode,
30
    osutils,
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
31
    progress,
32
    )
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
33
from bzrlib.errors import (
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
34
                           RevisionNotPresent,
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
35
                           RevisionAlreadyPresent,
36
                           WeaveParentMismatch
37
                           )
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
38
from bzrlib import knit as _mod_knit
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
39
from bzrlib.knit import (
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
40
    cleanup_pack_knit,
41
    make_file_factory,
42
    make_pack_factory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
43
    KnitAnnotateFactory,
2770.1.10 by Aaron Bentley
Merge bzr.dev
44
    KnitPlainFactory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
45
    )
3350.3.14 by Robert Collins
Deprecate VersionedFile.join.
46
from bzrlib.symbol_versioning import one_four, one_five
3350.6.2 by Robert Collins
Prepare parameterised test environment.
47
from bzrlib.tests import (
48
    TestCaseWithMemoryTransport,
49
    TestScenarioApplier,
50
    TestSkipped,
51
    condition_isinstance,
52
    split_suite_by_condition,
53
    iter_suite_tests,
54
    )
3102.1.1 by Vincent Ladeuil
Rename bzrlib/test/HTTPTestUtils.py to bzrlib/tests/http_utils.py and fix
55
from bzrlib.tests.http_utils import TestCaseWithWebserver
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
56
from bzrlib.trace import mutter
1563.2.16 by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable.
57
from bzrlib.transport import get_transport
1563.2.13 by Robert Collins
InterVersionedFile implemented.
58
from bzrlib.transport.memory import MemoryTransport
1684.3.1 by Robert Collins
Fix versioned file joins with empty targets.
59
from bzrlib.tsort import topo_sort
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
60
from bzrlib.tuned_gzip import GzipFile
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
61
import bzrlib.versionedfile as versionedfile
3350.6.2 by Robert Collins
Prepare parameterised test environment.
62
from bzrlib.versionedfile import (
63
    ConstantMapper,
64
    HashEscapedPrefixMapper,
65
    PrefixMapper,
66
    make_versioned_files_factory,
67
    )
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
68
from bzrlib.weave import WeaveFile
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
69
from bzrlib.weavefile import read_weave, write_weave
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
70
71
3350.6.2 by Robert Collins
Prepare parameterised test environment.
72
def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    single_key_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            }),
        ('named-nograph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            }),
        ]
    double_key_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph': True,
            'key_length': 2,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            }),
        ]
    # Each scenario list gets its own applier; every matched test is adapted
    # by both, so it runs once per scenario.
    appliers = []
    for scenarios in (single_key_scenarios, double_key_scenarios):
        applier = TestScenarioApplier()
        applier.scenarios = scenarios
        appliers.append(applier)
    for test in iter_suite_tests(to_adapt):
        for applier in appliers:
            result.addTests(applier.adapt(test))
    return result
146
147
3350.3.11 by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error.
148
def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param f: The versioned file to add the texts to.
    :param trailing_eol: If True end the last line with \\n.
    :param left_only: If True do not add the 'right' and 'merged' texts.
    :return: A tuple of (f, parents) where parents maps each version id to
        its parent-key details.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    if trailing_eol:
        eol = '\n'
    else:
        eol = ''
    # Build the diamond: origin -> base -> (left, right) -> merged.
    f.add_lines('origin', [], ['origin' + eol])
    f.add_lines('base', ['origin'], ['base' + eol])
    f.add_lines('left', ['base'], ['base\n', 'left' + eol])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + eol])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + eol])
    return f, parents
174
175
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
176
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \\n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :return: The results of the add_lines calls.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    if trailing_eol:
        eol = '\n'
    else:
        eol = ''
    result = []
    def get_parents(suffix_list):
        # Reads `prefix` from the enclosing loop at call time, so parent
        # keys always stay inside the prefix currently being populated.
        if nograph:
            return ()
        return [prefix + suffix for suffix in suffix_list]
    # (name, parent suffixes, lines) for each node of the diamond.
    nodes = [
        ('origin', None, ['origin' + eol]),
        ('base', [('origin',)], ['base' + eol]),
        ('left', [('base',)], ['base\n', 'left' + eol]),
        ]
    if not left_only:
        nodes.append(('right', [('base',)], ['base\n', 'right' + eol]))
        nodes.append(('merged', [('left',), ('right',)],
            ['base\n', 'left\n', 'right\n', 'merged' + eol]))
    # We loop over prefixes within each node because that spreads the inserts
    # across prefixes, which is how commit operates.
    for name, parent_suffixes, lines in nodes:
        for prefix in prefixes:
            if parent_suffixes is None:
                # 'origin' always gets an empty parent list.
                parent_keys = ()
            else:
                parent_keys = get_parents(parent_suffixes)
            # Pass a fresh list each call, as the original per-call literals did.
            result.append(files.add_lines(prefix + (name,), parent_keys,
                list(lines)))
    return result
230
231
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
232
class VersionedFileTestMixIn(object):
233
    """A mixin test class for testing VersionedFiles.
234
235
    This is not an adaptor-style test at this point because
236
    theres no dynamic substitution of versioned file implementations,
237
    they are strictly controlled by their owning repositories.
238
    """
239
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
240
    def get_transaction(self):
241
        if not hasattr(self, '_transaction'):
242
            self._transaction = None
243
        return self._transaction
244
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
245
    def test_add(self):
        """Added versions are retrievable and survive a reopen."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            # The same invariants must hold both before and after reopening.
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEqual(f.get_text('r0'), 'a\nb\n')
            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())
            # Adding with an absent parent, or re-adding an existing version,
            # must be rejected.
            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
267
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
268
    def test_adds_with_parent_texts(self):
        """add_lines populates and consumes a parent_texts memo dict."""
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        # Each add must have returned a usable parent-text memo entry.
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def verify_file(f):
            # The same invariants must hold both before and after reopening.
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate('r1')
            self.assertEqual(origins[0][0], 'r0')
            self.assertEqual(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEqual(origins[0][0], 'r1')
            self.assertEqual(origins[1][0], 'r2')

        verify_file(f)
        f = self.reopen_file()
        verify_file(f)
301
2805.6.7 by Robert Collins
Review feedback.
302
    def test_add_unicode_content(self):
        # Versioned files store sequences of bytes only, so unicode line
        # content must be rejected by both add APIs (formats without ghost
        # support may raise NotImplementedError from the ghosts variant).
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
311
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
312
    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        f = self.get_file()
        if isinstance(f, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        f.add_lines('1', [], ['a\n'])
        # Two different (but both valid) matching-block hints must each
        # reconstruct to the same stored text.
        f.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                    left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], f.get_lines('2'))
        f.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                    left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], f.get_lines('3'))
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
328
2805.6.7 by Robert Collins
Review feedback.
329
    def test_inline_newline_throws(self):
        # Lines may not contain an embedded newline (the original comment
        # said \r, but the rejected character here is the inner \n).
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            # ghost support is optional; nothing more to check here.
            pass
343
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
344
    def test_add_reserved(self):
        # Version ids using the reserved ':' suffix syntax cannot be added.
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
348
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
349
    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        # Empty text, text with trailing eol, text without trailing eol.
        samples = [
            ('a', []),
            ('b', ["foo\n", "bar\n"]),
            ('c', ["foo\n", "bar"]),
            ]
        shas = [vf.add_lines(version, [], lines)[0]
                for version, lines in samples]
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(shas, samples):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
368
2803.1.1 by Robert Collins
Fix typo in ghosts version of test_add_lines_nostoresha.
369
    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        # Empty text, text with trailing eol, text without trailing eol.
        samples = [
            ('a', []),
            ('b', ["foo\n", "bar\n"]),
            ('c', ["foo\n", "bar"]),
            ]
        shas = [vf.add_lines(version, [], lines)[0]
                for version, lines in samples]
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(shas, samples):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
393
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
394
    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        # Empty text, text with trailing eol, text without trailing eol.
        samples = [
            ('a', []),
            ('b', ["foo\n", "bar\n"]),
            ('c', ["foo\n", "bar"]),
            ]
        for version, lines in samples:
            result = vf.add_lines(version, [], lines)
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = samples[1][1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
413
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
414
    def test_get_reserved(self):
        # The retrieval APIs must reject reserved ids as well.
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        for getter in (vf.get_lines, vf.get_text):
            self.assertRaises(errors.ReservedId, getter, 'b:')
419
3468.2.4 by Martin Pool
Test and fix #234748 problems in trailing newline diffs
420
    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            # Build a linear chain of `length` versions, each adding one
            # 'prelude \n' line in front of the no-eol 'line' last line.
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            # The final add reuses the unchanged no-eol last line.
            vf.add_lines('no-eol', parents, ['line'])
            # Extracting every text exercises reconstruction along the chain.
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
444
445
    def test_get_texts_eol_variation(self):
        """Texts alternating eol/no-eol must extract correctly in any order."""
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        # Build a 4-version chain whose content alternates between the
        # eol and no-eol variants of the same line.
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        # Extraction must work in both chain order and reverse order.
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
473
3460.2.1 by Robert Collins
* Inserting a bundle which changes the contents of a file with no trailing
474
    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        # (The unused `from bzrlib import multiparent` import and the unused
        # hand-verified `sha1` local have been removed; nothing in this test
        # referenced them.)
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
495
2520.4.85 by Aaron Bentley
Get all test passing (which just proves there aren't enough tests!)
496
    def test_make_mpdiffs(self):
        """Texts rebuilt from mpdiffs must match the source texts exactly."""
        from bzrlib import multiparent
        source = self.get_file('foo')
        self._setup_for_deltas(source)
        target = self.get_file('bar')
        # walk in topological order so parents are always present in target
        # before any child that needs them
        for version in multiparent.topo_iter(source):
            mpdiff = source.make_mpdiffs([version])[0]
            parents = source.get_parent_map([version])[version]
            sha1 = source.get_sha1s([version])[0]
            target.add_mpdiffs([(version, parents, sha1, mpdiff)])
            self.assertEqualDiff(source.get_text(version),
                                 target.get_text(version))
507
3453.3.2 by John Arbash Meinel
Add a test case for the first loop, unable to find a way to trigger the second loop
508
    def test_make_mpdiffs_with_ghosts(self):
        """make_mpdiffs on a ghost version raises RevisionNotPresent."""
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
516
1596.2.38 by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready.
517
    def _setup_for_deltas(self, f):
        """Populate f with texts exercising delta chains and eol handling.

        Two parallel insertion chains of 26 texts each are created (enough to
        trip a knit's maximum delta chain threshold), plus a set of texts
        covering the interesting end-of-line corner cases.  Returns a
        hand-recorded table of sha1s keyed by chain depth.
        """
        self.assertFalse(f.has_version('base'))
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # last line shared with the parent, both no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # last line differing from the parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # eol following a no-eol parent, content changed
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # eol following a no-eol parent, content unchanged
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # no-eol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # no-eol preceding its leftmost parent in the output: done by making
        # it a merge of two parents with no common ancestry (noeolbase and
        # noeol) with the later-inserted parent as the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        # hand-recorded sha1s for the chain texts, keyed by depth
        sha1s = {
            0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
            1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
            2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
            3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
            4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
            5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
            6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
            7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
            8: '779e9a0b28f9f832528d4b21e17e168c67697272',
            9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
            10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
            11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
            12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
            13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
            14: '2c4b1736566b8ca6051e668de68650686a3922f2',
            15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
            16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
            17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
            18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
            19: '1ebed371807ba5935958ad0884595126e8c4e823',
            20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
            21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
            22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
            23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
            24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
            25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
            }
        # build the two parallel chains, each growing by one line per step
        for text_name in ('chain1-', 'chain2-'):
            next_parent = 'base'
            text = ['line\n']
            for depth in range(26):
                new_version = text_name + '%s' % depth
                text = text + ['line\n']
                f.add_lines(new_version, [next_parent], text)
                next_parent = new_version
        return sha1s
1596.2.37 by Robert Collins
Switch to delta based content copying in the generic versioned file copier.
586
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
587
    def test_ancestry(self):
        """get_ancestry returns a topologically consistent version list."""
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        graph = [('r0', [], ['a\n', 'b\n']),
                 ('r1', ['r0'], ['b\n', 'c\n']),
                 ('r2', ['r0'], ['b\n', 'c\n']),
                 ('r3', ['r2'], ['b\n', 'c\n']),
                 ('rM', ['r1', 'r2'], ['b\n', 'c\n'])]
        for version, parents, lines in graph:
            f.add_lines(version, parents, lines)
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # several orderings are acceptable:
        #   r0 r1 r2 rM
        #   r0 r2 r1 rM
        # etc - so only relative positions are checked.
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        # r3 is not an ancestor of rM and must be absent
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        # any unknown version in the request is an error
        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        # unsorted ancestry covers the same versions
        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
617
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
618
    def test_mutate_after_finish(self):
        """Mutating APIs raise OutSideTransaction once the scope has ended."""
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        for mutator in (f.add_lines, f.add_lines_with_ghosts):
            self.assertRaises(errors.OutSideTransaction, mutator, '', [], [])
1563.2.7 by Robert Collins
add versioned file clear_cache entry.
624
        
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
625
    def test_copy_to(self):
        """copy_to writes every suffix file to the target transport."""
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        target = MemoryTransport()
        f.copy_to('foo', target)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(target.has('foo' + suffix))
632
633
    def test_get_suffixes(self):
        """The factory's get_suffixes result must be a list."""
        f = self.get_file()
        suffixes = self.get_factory().get_suffixes()
        self.assertTrue(isinstance(suffixes, list))
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
637
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
638
    def test_get_parent_map(self):
        """get_parent_map maps present versions to parent tuples and omits absent ones."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual({'r0': ()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual({'r1': ('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0': (),
             'r1': ('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m': ('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        # unknown versions are silently dropped from the result
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0': (),
             'r1': ('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))
660
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
661
    def test_annotate(self):
        """annotate attributes each line to the version that introduced it."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        # 'c\n' is new in r1; 'b\n' is inherited from r0.
        # assertEquals is a deprecated alias - use assertEqual.
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r0')

        # annotating an unknown version is an error
        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')
671
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
672
    def test_detection(self):
        """Weaves detect corruption via the per-text checksum.

        Weaves contain a checksum of their texts; extracting a text must
        verify it, whether the stored text or the stored checksum is the
        part that was corrupted.
        """
        for make_corrupt_file in (self.get_file_corrupted_text,
                                  self.get_file_corrupted_checksum):
            w = make_corrupt_file()
            # v1 is untouched and still extracts cleanly
            self.assertEqual('hello\n', w.get_text('v1'))
            # every way of reading v2 must notice the mismatch
            self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
            self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
            self.assertRaises(errors.WeaveInvalidChecksum, w.check)
692
693
    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata.

        Hook for concrete test classes to implement.
        """
        raise NotImplementedError(self.get_file_corrupted_text)
696
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
697
    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again.

        Hook for concrete test classes to implement.
        """
        raise NotImplementedError(self.reopen_file)
700
1594.2.6 by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved.
701
    def test_iter_lines_added_or_present_in_versions(self):
        """The iterator yields at least every line added by the versions.

        The graph built here forms a small tree so that naive searches have
        more chances to get the traversal wrong.
        """

        class InstrumentedProgress(progress.DummyProgress):
            # records every update() call so reporting can be asserted on

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # a base revision that should be included
        vf.add_lines('base', [], ['base\n'])
        # an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # a child of rancestor
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # a child of lancestor and base, joining the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # count how often each line is returned; pb is renamed from the
            # original 'progress' to avoid shadowing the progress module
            counts = {}
            pb = InstrumentedProgress()
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=pb):
                counts[line] = counts.get(line, 0) + 1
            if pb.updates != []:
                self.assertEqual(expected, pb.updates)
            return counts

        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content.', 0, 2),
                                    ('Walking content.', 1, 2),
                                    ('Walking content.', 2, 2)])
        # we must see child and otherchild; extra lines beyond those are fine
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

        # passing None walks all versions
        lines = iter_with_versions(None, [('Walking content.', 0, 5),
                                          ('Walking content.', 1, 5),
                                          ('Walking content.', 2, 5),
                                          ('Walking content.', 3, 5),
                                          ('Walking content.', 4, 5),
                                          ('Walking content.', 5, 5)])
        # every line must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
1594.2.7 by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc.
764
1594.2.8 by Robert Collins
add ghost aware apis to knits.
765
    def test_add_lines_with_ghosts(self):
        """Exercise the *_with_ghosts APIs, or check they all raise together.

        Some versioned file formats allow lines to be added with parent
        information that is not (yet) present in the file; formats that do
        not support this must raise NotImplementedError from
        add_lines_with_ghosts.
        """
        vf = self.get_file()
        # add a revision with a ghost parent; the preferred id form is utf8,
        # built here from a non-ascii unicode name
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # then the other ghost apis must be unimplemented as well
            self.assertRaises(NotImplementedError,
                vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError,
                vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # the ghost-unaware apis (get_ancestry, has_version) must not
        # reflect the ghost at all
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # while the _with_ghosts apis expose the ghost information
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
        # adding a real text for the ghost corrects the answers of the
        # ghost-unaware apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse': (parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # and the _with_ghosts apis keep giving the same answers
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
1594.2.8 by Robert Collins
add ghost aware apis to knits.
802
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
803
    def test_add_lines_with_ghosts_after_normal_revs(self):
        """Annotation works for a version with a real and a ghost parent.

        Formats without ghost support raise NotImplementedError from
        add_lines_with_ghosts, in which case the test returns early.
        """
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        # lines shared with 'base' keep its annotation; only the new line is
        # attributed to 'references_ghost'.
        # (assertEquals is a deprecated alias - use assertEqual.)
        self.assertEqual(('base', 'line\n'), origins[0])
        self.assertEqual(('base', 'line_b\n'), origins[1])
        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
1594.2.23 by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files.
821
822
    def test_readonly_mode(self):
823
        transport = get_transport(self.get_url('.'))
824
        factory = self.get_factory()
825
        vf = factory('id', transport, 0777, create=True, access_mode='w')
826
        vf = factory('id', transport, access_mode='r')
827
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
828
        self.assertRaises(errors.ReadOnlyError,
829
                          vf.add_lines_with_ghosts,
830
                          'base',
831
                          [],
832
                          [])
1666.1.6 by Robert Collins
Make knit the default format.
833
    
3316.2.9 by Robert Collins
* ``VersionedFile.get_sha1`` is deprecated, please use
834
    def test_get_sha1s(self):
        """get_sha1s returns text sha1s in the order requested."""
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same text under different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a text differing only in the final newline
        vf.add_lines('c', [], ['a'])
        sha_with_nl = '3f786850e387550fdab836ed7e6dc881de23001b'
        sha_without_nl = '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8'
        self.assertEqual([sha_with_nl, sha_without_nl, sha_with_nl],
                          vf.get_sha1s(['a', 'c', 'b']))
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
847
        
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
848
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
849
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
    """Run the VersionedFileTestMixIn contract against WeaveFile."""

    def get_file(self, name='foo'):
        """Create (overwriting if present) a weave on this test's transport."""
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        """Return a weave whose second text is corrupt but metadata is valid."""
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text, so first make sure
        # the internals of the weave are as expected
        self.assertEqual([('{', 0),
                          'hello\n',
                          ('}', None),
                          ('{', 1),
                          'there\n',
                          ('}', None),
                          ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f',
                          '90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], w._sha1s)
        w.check()

        # corrupt the stored text of v2; the recorded sha1 is left alone
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        """Return a weave with valid texts but a corrupted stored sha1."""
        w = self.get_file_corrupted_text()
        # restore the text to its correct form ...
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # ... and invalidate the checksum instead (first digit changed)
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        # opening a missing weave without create=True must fail
        self.assertRaises(errors.NoSuchFile, WeaveFile, 'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
903
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
904
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
905
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
    """Tests for _PlanMergeVersionedFile layered over two backing stores."""

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        # Only keys whose revision part ends in ':' may be added directly,
        # and parents/lines must both be real lists.
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        for bad_args in [
                (('root', 'a'), [], []),     # revision id lacks ':' suffix
                (('root', 'a:'), None, []),  # parents is not a list
                (('root', 'a:'), [], None),  # lines is not a list
                ]:
            self.assertRaises(ValueError,
                self.plan_merge_vf.add_lines, *bad_args)

    def setup_abcde(self):
        """Populate the two fallbacks with A->B and C->D, then merge E:."""
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        get_map = self.plan_merge_vf.get_parent_map
        # Versions found in either fallback are resolved there.
        self.assertEqual({('root', 'B'): (('root', 'A'),)},
            get_map([('root', 'B')]))
        self.assertEqual({('root', 'D'): (('root', 'C'),)},
            get_map([('root', 'D')]))
        # Versions added directly to the plan-merge vf are also visible.
        self.assertEqual({('root', 'E:'): (('root', 'B'), ('root', 'D'))},
            get_map([('root', 'E:')]))
        # Unknown keys are simply absent from the result.
        self.assertEqual({}, get_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'): (('root', 'A'),),
                ('root', 'D'): (('root', 'C'),),
                ('root', 'E:'): (('root', 'B'), ('root', 'D')),
                },
            get_map([('root', 'B'), ('root', 'D'), ('root', 'E:'),
                     ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        # Records from either fallback and from the overlay are served.
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        # Missing keys yield an 'absent' record rather than an error.
        self.assertEqual('absent', get_record('F').storage_kind)
1666.1.1 by Robert Collins
Add trivial http-using test for versioned files.
960
961
962
class TestReadonlyHttpMixin(object):
    """Mixin checking that a versioned file can be read over readonly HTTP."""

    def get_transaction(self):
        # Stub scope token; these tests need no real transaction object.
        return 1

    def test_readonly_http_works(self):
        # We should be able to read from http with a versioned file.
        vf = self.get_file()
        factory = self.get_factory()
        # First, an access to an empty file.
        readonly_vf = factory('foo',
            get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # Now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = factory('foo',
            get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        # NOTE(review): the assertion above inspects the writable vf;
        # presumably readonly_vf.versions() was intended — confirm.
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
980
981
982
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP mixin scenarios against WeaveFile."""

    def get_file(self):
        transport = get_transport(self.get_url('.'))
        return WeaveFile('foo', transport, create=True,
                         get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
990
991
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
992
class MergeCasesMixin(object):
    """Weave-merge scenarios shared across VersionedFile implementations.

    Subclasses provide get_file() returning an empty versioned file and
    log_contents() to dump its internals for debugging.
    """

    def doMerge(self, base, a, b, mp):
        """Merge a and b (both derived from base) and expect the lines mp.

        All line lists are given without trailing newlines; newlines are
        appended here before the texts are stored.
        """
        from cStringIO import StringIO
        # (an unused 'from textwrap import dedent' import was removed here)

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    # Expected outcome of testOverlappedInsert; subclasses whose merge
    # reports a conflict here override this attribute.
    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """
        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        """Merge whole-string texts a and b and diff against expected."""
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more."""
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
1227
1228
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
1229
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """MergeCasesMixin scenarios run against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        buf = StringIO()
        write_weave(w, buf)
        self.log(buf.getvalue())

    # The weave merge reports a conflict on overlapped inserts rather than
    # coalescing them, so override the mixin's expectation.
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1242
1243
1244
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
    """Tests for the record adapters registered for knit storage kinds."""

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario per (source_kind, requested_kind) lookup we expect to
        # use, paired with the adapter class that should serve it.
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source_kind, requested_kind, adapter_class in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source_kind, requested_kind))
            self.assertIsInstance(adapter_factory(None), adapter_class)

    def get_knit(self, annotated=True):
        """Return a knit VersionedFiles stored under the single name 'knit'."""
        mapper = ConstantMapper('knit')
        return make_file_factory(annotated, mapper)(self.get_transport())

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # 'origin' is stored as a fulltext record.
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base,
            base.get_bytes_as(base.storage_kind))
        # 'merged' is both a delta and has multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged,
            merged.get_bytes_as(merged.storage_kind))
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # We need a full text, and a delta.
        files = self.get_knit()
        get_diamond_files(files, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(files,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # We need a full text, and a delta.
        files = self.get_knit()
        get_diamond_files(files, 1)
        ft_data, delta_data = self.helpGetBytes(files,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # We need a full text, and a delta.
        files = self.get_knit()
        get_diamond_files(files, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(files)
        ft_data, delta_data = self.helpGetBytes(files,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # We need a full text, and a delta.
        files = self.get_knit()
        get_diamond_files(files, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(files)
        ft_data, delta_data = self.helpGetBytes(files,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # We need a full text, and a delta.
        files = self.get_knit(annotated=False)
        get_diamond_files(files, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(files)
        ft_data, delta_data = self.helpGetBytes(files,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # We need a full text, and a delta.
        files = self.get_knit(annotated=False)
        get_diamond_files(files, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(files)
        ft_data, delta_data = self.helpGetBytes(files,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1391
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
1392
1393
class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        # ConstantMapper maps every key to the same name.
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        # format5: plain — first key element becomes the name, round-trips.
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain — a hash bucket is prepended to the prefix.
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped — unsafe characters are %-escaped as well.
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1427
1428
1429
class TestVersionedFiles(TestCaseWithMemoryTransport):
1430
    """Tests for the multiple-file variant of VersionedFile."""
1431
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1432
    def get_versionedfiles(self, relpath='files'):
        """Construct the VersionedFiles under test at relpath.

        Registers the per-implementation cleanup hook, when one is set.
        """
        t = self.get_transport(relpath)
        if relpath != '.':
            t.mkdir('.')
        vf = self.factory(t)
        if self.cleanup is not None:
            self.addCleanup(lambda: self.cleanup(vf))
        return vf
    def test_annotate(self):
        """annotate() attributes each line to the key that introduced it."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        # multi-element keys share the 'FileA' prefix used by the fixture.
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        # unknown keys raise rather than returning empty annotations.
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))
    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        self.get_versionedfiles()
    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        """Insert the standard diamond-ancestry fixture into files.

        Delegates to the module-level helper, passing this test's
        parameterised key length and graph support along.
        """
        return get_diamond_files(
            files, self.key_length, trailing_eol=trailing_eol,
            nograph=not self.graph, left_only=left_only)
    def test_add_lines_return(self):
        """add_lines returns (sha1, length, ...) for each inserted text."""
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            # two-element keys insert the diamond under two prefixes, so each
            # (sha1, length) pair appears twice.
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        # an empty text with a parent is also storable.
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))
    def test_newline_only(self):
        """A text consisting of a single newline round-trips intact."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        # and again with a parent present.
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))
    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        stream = self.get_versionedfiles().get_record_stream(
            [], 'unordered', False)
        self.assertEqual([], list(stream))
    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind.

        :param storage_kind: The storage kind string a record factory
            reported; must be one of the kinds the API contract allows.
        """
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
             'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])
    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing.

        Validates each record factory against the versioned files it came
        from, then reports its key via on_seen.

        :param f: The versioned files the records were read from.
        :param entries: Iterable of record factories (a record stream).
        :param on_seen: Callable invoked with each record's key.
        :param parents: Mapping of key -> expected parents for that key.
        """
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[0], factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        # every requested key must have been yielded exactly once.
        self.assertEqual(set(keys), seen)
    def get_simple_key(self, suffix):
        """Return a key of self.key_length elements ending in suffix."""
        if self.key_length == 1:
            return (suffix,)
        return ('FileA', suffix)
    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering.

        :return: (keys, sort_order) where sort_order maps each key to its
            topological depth (base=0, left/right=1, merged=2) within its
            prefix.
        """
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order
    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        # collect into a list (not a set) so ordering can be checked.
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)
    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # include_delta_closure=True: every record must be expandable.
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            # sha1 may legitimately be None when the record was recompressed.
            self.assertSubset([factory.sha1], [None, files.get_sha1s([factory.key])[0]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as('fulltext'), str)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertStreamOrder(sort_order, seen, keys)
    def assertStreamOrder(self, sort_order, seen, keys):
        """Assert seen yields keys in non-decreasing topological order.

        :param sort_order: Mapping of key -> topological depth.
        :param seen: The keys in the order the stream yielded them.
        :param keys: The keys that were requested.
        """
        self.assertEqual(len(set(seen)), len(keys))
        # track the lowest depth permitted per prefix; ordering is only
        # required within a prefix.
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            # without a graph there is no ordering constraint at all.
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(files.get_sha1s([factory.key])[0], factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            # the native kind must still work after the failed request.
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)
    def test_get_record_stream_missing_records_are_absent(self):
        """Missing keys yield 'absent' records rather than being dropped."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        # both orderings must report the absent records.
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent.

        Checks that records for keys ending in 'absent' are reported with
        the 'absent' storage kind and no sha1/parents, while all other
        records are fully valid.
        """
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[0], factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)
    def test_filter_absent_records(self):
        """Requested missing records can be filter trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        # filter_absent must pass through only the present records.
        self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
            parent_map)
        self.assertEqual(set(present_keys), seen)
    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        return HashEscapedPrefixMapper()
    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration.

        Graph-less implementations record no parents, so return None there.
        """
        return parents if self.graph else None
    def test_get_parent_map(self):
        """get_parent_map answers for present keys and omits absent ones."""
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
    def test_get_sha1s(self):
        """get_sha1s returns one sha per requested key, in request order."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual([
            '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            '00e364d235126be43292ab09cb4686cf703ddc17',
            'a8478686da38e370e32e42e8a0c220e33ee9132f',
            'ed8bce375198ea62444dc71952b22cfc2b09226d',
            '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            ],
            files.get_sha1s(keys))
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        self.get_versionedfiles().insert_record_stream([])
    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents.

        Compares the key sets, the parent maps (or all-None parents when
        self.graph is False) and the fulltext of every key.
        """
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)
    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        # Fix: this test previously passed trailing_eol=False, which made it
        # a byte-for-byte duplicate of
        # test_insert_record_stream_fulltexts_noeol. Use the default
        # trailing-EOL fixture here so the two tests cover both cases.
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts.

        Variant with texts whose last line has no trailing newline.
        """
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # True -> annotated knit source.
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits.

        Variant with texts whose last line has no trailing newline.
        """
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # True -> annotated knit source.
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # False -> plain (unannotated) knit source.
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits.

        Variant with texts whose last line has no trailing newline.
        """
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # False -> plain (unannotated) knit source.
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into f.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        # the overlap must merge cleanly into an identical file.
        self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        # source is empty, so this stream yields only absent records.
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # split the diamond so children arrive before their parents.
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # implementations that buffer may accept the stream; the result
            # must then be complete and identical.
            self.assertIdenticalVersionedFile(source, files)
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included aborts safely."""
        # We use a knit always here to be sure we are getting a binary delta.
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        # Fix: keys are tuples of self.key_length elements, not bare strings;
        # build them with get_simple_key like every other test here, so the
        # request actually selects 'origin' (a fulltext) and 'merged' (a
        # delta whose basis is deliberately not requested).
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        self.assertRaises(RevisionNotPresent, files.insert_record_stream,
            entries)
        # the failed insertion must leave the target valid and empty.
        files.check()
        self.assertEqual({}, files.get_parent_map([]))
    def test_iter_lines_added_or_present_in_keys(self):
        """iter_lines_added_or_present_in_keys yields at least the lines
        introduced by the requested keys, with progress reporting."""
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):
            # records every update() call so the test can assert on the
            # progress protocol as well as the content.

            def __init__(self):

                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add a ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add a ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            # implementations that report no progress at all are allowed.
            if []!= progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content.', 0, 2),
             ('Walking content.', 1, 2),
             ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content.', 0, 5),
             ('Walking content.', 1, 5),
             ('Walking content.', 2, 5),
             ('Walking content.', 3, 5),
             ('Walking content.', 4, 5),
             ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
    def test_make_mpdiffs(self):
        """Round-trip every text through make_mpdiffs / add_mpdiffs.

        First builds a dense graph in 'source': two parallel chains of 26
        insertions rooted at 'base' (enough to trip the knit maximum delta
        chain threshold), plus a cluster of eol/no-eol corner cases.  Each
        text is then converted to a multiparent diff, inserted into a fresh
        'target' store, and the reconstructed fulltexts are compared.
        """
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        # Two parallel chains of 26 growing texts, both rooted at 'base',
        # to force long delta chains.  (chain1 is inserted completely
        # before chain2, same order as before the loops were merged.)
        for text_name in ('chain1-', 'chain2-'):
            next_parent = self.get_simple_key('base')
            text = ['line\n']
            for depth in range(26):
                new_version = self.get_simple_key(text_name + '%s' % depth)
                text = text + ['line\n']
                files.add_lines(new_version,
                    self.get_parents([next_parent]), text)
                next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            # get_sha1s returns a dict keyed by the requested keys, so the
            # sha1 must be looked up by key (not by index 0).
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )

    def test_keys(self):
        """keys() reports exactly the versions that have been added.

        Direct enumeration is discouraged, but parts of bzr still rely on
        it, so the contract is pinned here: empty store -> empty key set,
        one added text -> exactly that key.
        """
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        # Build a key of the right width for this store's key_length.
        key = ('foo',) if self.key_length == 1 else ('foo', 'bar')
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))