/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
4103.3.4 by Martin Pool
Update test that depends on progress bar strings
1
# Copyright (C) 2005, 2009 Canonical Ltd
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
2
#
3
# Authors:
4
#   Johan Rydberg <jrydberg@gnu.org>
5
#
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
10
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
15
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20
1704.2.15 by Martin Pool
Remove TODO about knit testing printed from test suite
21
# TODO: might be nice to create a versionedfile with some type of corruption
22
# considered typical and check that it can be detected/corrected.
23
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
24
from itertools import chain, izip
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
25
from StringIO import StringIO
26
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
27
import bzrlib
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
28
from bzrlib import (
29
    errors,
2309.4.7 by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode,
30
    osutils,
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
31
    progress,
32
    )
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
33
from bzrlib.errors import (
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
34
                           RevisionNotPresent,
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
35
                           RevisionAlreadyPresent,
36
                           WeaveParentMismatch
37
                           )
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
38
from bzrlib import knit as _mod_knit
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
39
from bzrlib.knit import (
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
40
    cleanup_pack_knit,
41
    make_file_factory,
42
    make_pack_factory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
43
    KnitAnnotateFactory,
2770.1.10 by Aaron Bentley
Merge bzr.dev
44
    KnitPlainFactory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
45
    )
3350.3.14 by Robert Collins
Deprecate VersionedFile.join.
46
from bzrlib.symbol_versioning import one_four, one_five
3350.6.2 by Robert Collins
Prepare parameterised test environment.
47
from bzrlib.tests import (
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
48
    TestCase,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
49
    TestCaseWithMemoryTransport,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
50
    TestNotApplicable,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
51
    TestSkipped,
52
    condition_isinstance,
53
    split_suite_by_condition,
4084.5.1 by Robert Collins
Bulk update all test adaptation into a single approach, using multiply_tests rather than test adapters.
54
    multiply_tests,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
55
    )
2929.3.8 by Vincent Ladeuil
Rename bzrlib/test/HTTPTestUtils.py to bzrlib/tests/http_utils.py and fix uses.
56
from bzrlib.tests.http_utils import TestCaseWithWebserver
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
57
from bzrlib.trace import mutter
1563.2.16 by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable.
58
from bzrlib.transport import get_transport
1563.2.13 by Robert Collins
InterVersionedFile implemented.
59
from bzrlib.transport.memory import MemoryTransport
1684.3.1 by Robert Collins
Fix versioned file joins with empty targets.
60
from bzrlib.tsort import topo_sort
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
61
from bzrlib.tuned_gzip import GzipFile
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
62
import bzrlib.versionedfile as versionedfile
3350.6.2 by Robert Collins
Prepare parameterised test environment.
63
from bzrlib.versionedfile import (
64
    ConstantMapper,
65
    HashEscapedPrefixMapper,
66
    PrefixMapper,
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
67
    VirtualVersionedFiles,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
68
    make_versioned_files_factory,
69
    )
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
70
from bzrlib.weave import WeaveFile
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
71
from bzrlib.weavefile import read_weave, write_weave
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
72
73
3350.6.2 by Robert Collins
Prepare parameterised test environment.
74
def load_tests(standard_tests, module, loader):
75
    """Parameterize VersionedFiles tests for different implementations."""
76
    to_adapt, result = split_suite_by_condition(
77
        standard_tests, condition_isinstance(TestVersionedFiles))
78
    # We want to be sure of behaviour for:
79
    # weaves prefix layout (weave texts)
80
    # individually named weaves (weave inventories)
81
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
82
    #                   as it is the most complex mapper.
83
    # individually named knits
84
    # individual no-graph knits in packs (signatures)
85
    # individual graph knits in packs (inventories)
86
    # individual graph nocompression knits in packs (revisions)
87
    # plain text knits in packs (texts)
4084.5.1 by Robert Collins
Bulk update all test adaptation into a single approach, using multiply_tests rather than test adapters.
88
    len_one_scenarios = [
3350.6.2 by Robert Collins
Prepare parameterised test environment.
89
        ('weave-named', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
90
            'cleanup':None,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
91
            'factory':make_versioned_files_factory(WeaveFile,
92
                ConstantMapper('inventory')),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
93
            'graph':True,
94
            'key_length':1,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
95
            'support_partial_insertion': False,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
96
            }),
97
        ('named-knit', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
98
            'cleanup':None,
99
            'factory':make_file_factory(False, ConstantMapper('revisions')),
100
            'graph':True,
101
            'key_length':1,
4009.3.7 by Andrew Bennetts
Most tests passing.
102
            'support_partial_insertion': False,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
103
            }),
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
104
        ('named-nograph-nodelta-knit-pack', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
105
            'cleanup':cleanup_pack_knit,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
106
            'factory':make_pack_factory(False, False, 1),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
107
            'graph':False,
108
            'key_length':1,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
109
            'support_partial_insertion': False,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
110
            }),
111
        ('named-graph-knit-pack', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
112
            'cleanup':cleanup_pack_knit,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
113
            'factory':make_pack_factory(True, True, 1),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
114
            'graph':True,
115
            'key_length':1,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
116
            'support_partial_insertion': True,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
117
            }),
118
        ('named-graph-nodelta-knit-pack', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
119
            'cleanup':cleanup_pack_knit,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
120
            'factory':make_pack_factory(True, False, 1),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
121
            'graph':True,
122
            'key_length':1,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
123
            'support_partial_insertion': False,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
124
            }),
125
        ]
4084.5.1 by Robert Collins
Bulk update all test adaptation into a single approach, using multiply_tests rather than test adapters.
126
    len_two_scenarios = [
3350.6.2 by Robert Collins
Prepare parameterised test environment.
127
        ('weave-prefix', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
128
            'cleanup':None,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
129
            'factory':make_versioned_files_factory(WeaveFile,
130
                PrefixMapper()),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
131
            'graph':True,
132
            'key_length':2,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
133
            'support_partial_insertion': False,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
134
            }),
135
        ('annotated-knit-escape', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
136
            'cleanup':None,
137
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
138
            'graph':True,
139
            'key_length':2,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
140
            'support_partial_insertion': False,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
141
            }),
142
        ('plain-knit-pack', {
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
143
            'cleanup':cleanup_pack_knit,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
144
            'factory':make_pack_factory(True, True, 2),
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
145
            'graph':True,
146
            'key_length':2,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
147
            'support_partial_insertion': True,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
148
            }),
149
        ]
4084.5.1 by Robert Collins
Bulk update all test adaptation into a single approach, using multiply_tests rather than test adapters.
150
    scenarios = len_one_scenarios + len_two_scenarios
151
    return multiply_tests(to_adapt, scenarios, result)
3350.6.2 by Robert Collins
Prepare parameterised test environment.
152
153
3350.3.11 by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error.
154
def get_diamond_vf(f, trailing_eol=True, left_only=False):
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
155
    """Get a diamond graph to exercise deltas and merges.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
156
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
157
    :param trailing_eol: If True end the last line with \n.
158
    """
159
    parents = {
160
        'origin': (),
161
        'base': (('origin',),),
162
        'left': (('base',),),
163
        'right': (('base',),),
164
        'merged': (('left',), ('right',)),
165
        }
166
    # insert a diamond graph to exercise deltas and merges.
167
    if trailing_eol:
168
        last_char = '\n'
169
    else:
170
        last_char = ''
171
    f.add_lines('origin', [], ['origin' + last_char])
172
    f.add_lines('base', ['origin'], ['base' + last_char])
173
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
3350.3.11 by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error.
174
    if not left_only:
175
        f.add_lines('right', ['base'],
176
            ['base\n', 'right' + last_char])
177
        f.add_lines('merged', ['left', 'right'],
178
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
179
    return f, parents
180
181
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
182
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
183
    nograph=False):
184
    """Get a diamond graph to exercise deltas and merges.
185
186
    This creates a 5-node graph in files. If files supports 2-length keys two
187
    graphs are made to exercise the support for multiple ids.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
188
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
189
    :param trailing_eol: If True end the last line with \n.
190
    :param key_length: The length of keys in files. Currently supports length 1
191
        and 2 keys.
192
    :param left_only: If True do not add the right and merged nodes.
193
    :param nograph: If True, do not provide parents to the add_lines calls;
194
        this is useful for tests that need inserted data but have graphless
195
        stores.
196
    :return: The results of the add_lines calls.
197
    """
198
    if key_length == 1:
199
        prefixes = [()]
200
    else:
201
        prefixes = [('FileA',), ('FileB',)]
202
    # insert a diamond graph to exercise deltas and merges.
203
    if trailing_eol:
204
        last_char = '\n'
205
    else:
206
        last_char = ''
207
    result = []
208
    def get_parents(suffix_list):
209
        if nograph:
210
            return ()
211
        else:
212
            result = [prefix + suffix for suffix in suffix_list]
213
            return result
214
    # we loop over each key because that spreads the inserts across prefixes,
215
    # which is how commit operates.
216
    for prefix in prefixes:
217
        result.append(files.add_lines(prefix + ('origin',), (),
218
            ['origin' + last_char]))
219
    for prefix in prefixes:
220
        result.append(files.add_lines(prefix + ('base',),
221
            get_parents([('origin',)]), ['base' + last_char]))
222
    for prefix in prefixes:
223
        result.append(files.add_lines(prefix + ('left',),
224
            get_parents([('base',)]),
225
            ['base\n', 'left' + last_char]))
226
    if not left_only:
227
        for prefix in prefixes:
228
            result.append(files.add_lines(prefix + ('right',),
229
                get_parents([('base',)]),
230
                ['base\n', 'right' + last_char]))
231
        for prefix in prefixes:
232
            result.append(files.add_lines(prefix + ('merged',),
233
                get_parents([('left',), ('right',)]),
234
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
235
    return result
236
237
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
238
class VersionedFileTestMixIn(object):
239
    """A mixin test class for testing VersionedFiles.
240
241
    This is not an adaptor-style test at this point because
242
    theres no dynamic substitution of versioned file implementations,
243
    they are strictly controlled by their owning repositories.
244
    """
245
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
246
    def get_transaction(self):
247
        if not hasattr(self, '_transaction'):
248
            self._transaction = None
249
        return self._transaction
250
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
251
    def test_add(self):
252
        f = self.get_file()
253
        f.add_lines('r0', [], ['a\n', 'b\n'])
254
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
255
        def verify_file(f):
256
            versions = f.versions()
257
            self.assertTrue('r0' in versions)
258
            self.assertTrue('r1' in versions)
259
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
260
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
261
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
1563.2.18 by Robert Collins
get knit repositories really using knits for text storage.
262
            self.assertEqual(2, len(f))
263
            self.assertEqual(2, f.num_versions())
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
264
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
265
            self.assertRaises(RevisionNotPresent,
266
                f.add_lines, 'r2', ['foo'], [])
267
            self.assertRaises(RevisionAlreadyPresent,
268
                f.add_lines, 'r1', [], [])
269
        verify_file(f)
1666.1.6 by Robert Collins
Make knit the default format.
270
        # this checks that reopen with create=True does not break anything.
271
        f = self.reopen_file(create=True)
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
272
        verify_file(f)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
273
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
274
    def test_adds_with_parent_texts(self):
275
        f = self.get_file()
276
        parent_texts = {}
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
277
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
278
        try:
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
279
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
280
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
281
        except NotImplementedError:
282
            # if the format doesn't support ghosts, just add normally.
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
283
            _, _, parent_texts['r1'] = f.add_lines('r1',
284
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
285
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
286
        self.assertNotEqual(None, parent_texts['r0'])
287
        self.assertNotEqual(None, parent_texts['r1'])
288
        def verify_file(f):
289
            versions = f.versions()
290
            self.assertTrue('r0' in versions)
291
            self.assertTrue('r1' in versions)
292
            self.assertTrue('r2' in versions)
293
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
294
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
295
            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
296
            self.assertEqual(3, f.num_versions())
297
            origins = f.annotate('r1')
298
            self.assertEquals(origins[0][0], 'r0')
299
            self.assertEquals(origins[1][0], 'r1')
300
            origins = f.annotate('r2')
301
            self.assertEquals(origins[0][0], 'r1')
302
            self.assertEquals(origins[1][0], 'r2')
303
304
        verify_file(f)
305
        f = self.reopen_file()
306
        verify_file(f)
307
2805.6.7 by Robert Collins
Review feedback.
308
    def test_add_unicode_content(self):
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
309
        # unicode content is not permitted in versioned files.
2805.6.7 by Robert Collins
Review feedback.
310
        # versioned files version sequences of bytes only.
311
        vf = self.get_file()
312
        self.assertRaises(errors.BzrBadParameterUnicode,
313
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
314
        self.assertRaises(
315
            (errors.BzrBadParameterUnicode, NotImplementedError),
316
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
317
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
318
    def test_add_follows_left_matching_blocks(self):
319
        """If we change left_matching_blocks, delta changes
320
321
        Note: There are multiple correct deltas in this case, because
322
        we start with 1 "a" and we get 3.
323
        """
324
        vf = self.get_file()
325
        if isinstance(vf, WeaveFile):
326
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
327
        vf.add_lines('1', [], ['a\n'])
328
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
329
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
2794.1.2 by Robert Collins
Nuke versioned file add/get delta support, allowing easy simplification of unannotated Content, reducing memory copies and friction during commit on unannotated texts.
330
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
331
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
332
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
2794.1.2 by Robert Collins
Nuke versioned file add/get delta support, allowing easy simplification of unannotated Content, reducing memory copies and friction during commit on unannotated texts.
333
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
334
2805.6.7 by Robert Collins
Review feedback.
335
    def test_inline_newline_throws(self):
336
        # \r characters are not permitted in lines being added
337
        vf = self.get_file()
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
338
        self.assertRaises(errors.BzrBadParameterContainsNewline,
2805.6.7 by Robert Collins
Review feedback.
339
            vf.add_lines, 'a', [], ['a\n\n'])
340
        self.assertRaises(
341
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
342
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
343
        # but inline CR's are allowed
344
        vf.add_lines('a', [], ['a\r\n'])
345
        try:
346
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
347
        except NotImplementedError:
348
            pass
349
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
350
    def test_add_reserved(self):
351
        vf = self.get_file()
352
        self.assertRaises(errors.ReservedId,
353
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
354
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
355
    def test_add_lines_nostoresha(self):
356
        """When nostore_sha is supplied using old content raises."""
357
        vf = self.get_file()
358
        empty_text = ('a', [])
359
        sample_text_nl = ('b', ["foo\n", "bar\n"])
360
        sample_text_no_nl = ('c', ["foo\n", "bar"])
361
        shas = []
362
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
363
            sha, _, _ = vf.add_lines(version, [], lines)
364
            shas.append(sha)
365
        # we now have a copy of all the lines in the vf.
366
        for sha, (version, lines) in zip(
367
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
368
            self.assertRaises(errors.ExistingContent,
369
                vf.add_lines, version + "2", [], lines,
370
                nostore_sha=sha)
371
            # and no new version should have been added.
372
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
373
                version + "2")
374
2803.1.1 by Robert Collins
Fix typo in ghosts version of test_add_lines_nostoresha.
375
    def test_add_lines_with_ghosts_nostoresha(self):
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
376
        """When nostore_sha is supplied using old content raises."""
377
        vf = self.get_file()
378
        empty_text = ('a', [])
379
        sample_text_nl = ('b', ["foo\n", "bar\n"])
380
        sample_text_no_nl = ('c', ["foo\n", "bar"])
381
        shas = []
382
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
383
            sha, _, _ = vf.add_lines(version, [], lines)
384
            shas.append(sha)
385
        # we now have a copy of all the lines in the vf.
386
        # is the test applicable to this vf implementation?
387
        try:
388
            vf.add_lines_with_ghosts('d', [], [])
389
        except NotImplementedError:
390
            raise TestSkipped("add_lines_with_ghosts is optional")
391
        for sha, (version, lines) in zip(
392
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
393
            self.assertRaises(errors.ExistingContent,
394
                vf.add_lines_with_ghosts, version + "2", [], lines,
395
                nostore_sha=sha)
396
            # and no new version should have been added.
397
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
398
                version + "2")
399
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
400
    def test_add_lines_return_value(self):
401
        # add_lines should return the sha1 and the text size.
402
        vf = self.get_file()
403
        empty_text = ('a', [])
404
        sample_text_nl = ('b', ["foo\n", "bar\n"])
405
        sample_text_no_nl = ('c', ["foo\n", "bar"])
406
        # check results for the three cases:
407
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
408
            # the first two elements are the same for all versioned files:
409
            # - the digest and the size of the text. For some versioned files
410
            #   additional data is returned in additional tuple elements.
411
            result = vf.add_lines(version, [], lines)
412
            self.assertEqual(3, len(result))
413
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
414
                result[0:2])
415
        # parents should not affect the result:
416
        lines = sample_text_nl[1]
417
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
418
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
419
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
420
    def test_get_reserved(self):
421
        vf = self.get_file()
422
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
423
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
424
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
425
3468.2.4 by Martin Pool
Test and fix #234748 problems in trailing newline diffs
426
    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for chain_length in range(20):
            vf = self.get_file('case-%d' % chain_length)
            recorded = {}
            parents = []
            for step in range(chain_length):
                name = 'step-%d' % step
                content = ['prelude \n'] * step + ['line']
                vf.add_lines(name, parents, content)
                recorded[name] = content
                parents = [name]
            # The final text shares its no-eol last line with its parent.
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(recorded.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
450
451
    def test_get_texts_eol_variation(self):
        """Alternating eol/no-eol texts extract correctly in either order."""
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        # even steps get no trailing newline, odd steps get one
        eol_choices = (["line"], ["line\n"])
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            lines = eol_choices[i % 2]
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
479
3460.2.1 by Robert Collins
* Inserting a bundle which changes the contents of a file with no trailing
480
    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work.

        Creates an mpdiff-style add which inserts a new line before the
        trailing no-eol line and reuses that last line unaltered (which can
        cause annotation reuse).  Hand-verified sha1 of the resulting text:
        6a1d115ec7b60afb664dc14890b5af5ce3c827a4.
        """
        # (Previously this method imported bzrlib.multiparent and bound the
        # sha1 above to a local; neither was used, so both are removed.)
        # On top of a new insertion:
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta:
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
501
2520.4.85 by Aaron Bentley
Get all test passing (which just proves there aren't enough tests!)
502
    def test_make_mpdiffs(self):
        """Round-trip every version through make_mpdiffs/add_mpdiffs."""
        from bzrlib import multiparent
        vf = self.get_file('foo')
        # _setup_for_deltas populates vf with delta chains; its returned sha1
        # table is not needed here, so the result is deliberately discarded.
        self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            # the reconstructed text must match the original exactly
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
513
3453.3.2 by John Arbash Meinel
Add a test case for the first loop, unable to find a way to trigger the second loop
514
    def test_make_mpdiffs_with_ghosts(self):
        """make_mpdiffs on a ghost revision raises RevisionNotPresent."""
        f = self.get_file('foo')
        try:
            f.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            return  # old Weave formats do not allow ghosts
        self.assertRaises(errors.RevisionNotPresent, f.make_mpdiffs, ['ghost'])
522
1596.2.38 by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready.
523
    def _setup_for_deltas(self, f):
        """Populate *f* with texts exercising delta chains and eol handling.

        Adds texts that should trip the knit maximum delta chain threshold
        as well as parallel chains of data (two chains of 26 insertions),
        plus a set of detailed end-of-line corner cases.

        :param f: an empty versioned file to populate.
        :return: dict mapping chain depth (0-25) to the expected sha1 of the
            'chain1-<depth>' text at that depth.
        """
        self.assertFalse(f.has_version('base'))
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }

        def add_chain(prefix):
            # Insert a 26-deep chain of steadily growing texts under *prefix*;
            # previously this loop was duplicated verbatim for each chain.
            next_parent = 'base'
            text = ['line\n']
            for depth in range(26):
                new_version = prefix + '%s' % depth
                text = text + ['line\n']
                f.add_lines(new_version, [next_parent], text)
                next_parent = new_version

        add_chain('chain1-')
        add_chain('chain2-')
        return sha1s
1596.2.37 by Robert Collins
Switch to delta based content copying in the generic versioned file copier.
592
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
593
    def test_ancestry(self):
        """get_ancestry returns a topologically ordered ancestry closure."""
        vf = self.get_file()
        self.assertEqual([], vf.get_ancestry([]))
        vf.add_lines('r0', [], ['a\n', 'b\n'])
        vf.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        vf.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        vf.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        vf.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], vf.get_ancestry([]))
        versions = vf.get_ancestry(['rM'])
        # several topological orders are legal (r0 r1 r2 rM, r0 r2 r1 rM, ...)
        # so compare positions rather than the exact list.
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        # r3 is not an ancestor of rM and must be absent
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        # asking for any unknown version raises
        self.assertRaises(RevisionNotPresent,
            vf.get_ancestry, ['rM', 'rX'])

        # topo_sorted=False must return the same set of versions
        self.assertEqual(set(vf.get_ancestry('rM')),
            set(vf.get_ancestry('rM', topo_sorted=False)))
623
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
624
    def test_mutate_after_finish(self):
        """Writes after the transaction scope ends raise OutSideTransaction."""
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        # both mutating entry points must refuse to run
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction,
            f.add_lines_with_ghosts, '', [], [])
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
630
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
631
    def test_copy_to(self):
        """copy_to writes every suffix of the file onto the target transport."""
        source = self.get_file()
        source.add_lines('0', [], ['a\n'])
        target = MemoryTransport()
        source.copy_to('foo', target)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(target.has('foo' + suffix))
638
639
    def test_get_suffixes(self):
        """The factory's get_suffixes() returns a list."""
        # get_file() is called for its side effect of creating the store;
        # the previous unused 'f = ...' binding has been dropped.
        self.get_file()
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
643
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
644
    def test_get_parent_map(self):
        """get_parent_map maps present versions to their parent tuples."""
        vf = self.get_file()
        vf.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual({'r0': ()}, vf.get_parent_map(['r0']))
        vf.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual({'r1': ('r0',)}, vf.get_parent_map(['r1']))
        self.assertEqual(
            {'r0': (), 'r1': ('r0',)},
            vf.get_parent_map(['r0', 'r1']))
        vf.add_lines('r2', [], ['a\n', 'b\n'])
        vf.add_lines('r3', [], ['a\n', 'b\n'])
        vf.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m': ('r0', 'r1', 'r2', 'r3')}, vf.get_parent_map(['m']))
        # absent keys are silently omitted from the result
        self.assertEqual({}, vf.get_parent_map('y'))
        self.assertEqual(
            {'r0': (), 'r1': ('r0',)},
            vf.get_parent_map(['r0', 'y', 'r1']))
666
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
667
    def test_annotate(self):
        """annotate attributes each line to the version that introduced it."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r0')

        # annotating an absent version raises
        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')
677
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
678
    def test_detection(self):
        """Weaves detect corruption via their per-text checksums.

        Weaves contain a checksum of their texts; when a text is extracted,
        this checksum should be verified, and check() must report it.
        """
        # The two corruption flavours share identical expectations, so loop
        # over the getters (lazily, preserving the original creation order)
        # instead of duplicating the assertion block.
        for get_corrupted in (self.get_file_corrupted_text,
                              self.get_file_corrupted_checksum):
            w = get_corrupted()
            # v1 is intact and must still be readable
            self.assertEqual('hello\n', w.get_text('v1'))
            # v2 is corrupt: every access path must fail the checksum
            self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
            self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
            self.assertRaises(errors.WeaveInvalidChecksum, w.check)
698
699
    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata.

        Concrete test classes must override this hook.
        """
        raise NotImplementedError(self.get_file_corrupted_text)
702
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
703
    def reopen_file(self, name='foo'):
        """Open the versioned file *name* from disk again.

        Concrete test classes must override this hook.
        """
        raise NotImplementedError(self.reopen_file)
706
1594.2.6 by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved.
707
    def test_iter_lines_added_or_present_in_versions(self):
        """We get at least an equalset of the lines added by the versions.

        The ordering here builds a tree so that dumb searches have more
        changes to muck up; the supplied progress bar must be driven too.
        """

        class InstrumentedProgress(progress.DummyProgress):
            """DummyProgress that records every update() call."""

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # Count how often each (line, origin) pair is returned.  The
            # progress local is named 'pb' (it previously shadowed the
            # module-level 'progress' import).
            lines = {}
            pb = InstrumentedProgress()
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=pb):
                lines.setdefault(line, 0)
                lines[line] += 1
            # implementations that emit no progress updates are exempt
            if [] != pb.updates:
                self.assertEqual(expected, pb.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
1594.2.7 by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc.
770
1594.2.8 by Robert Collins
add ghost aware apis to knits.
771
    def test_add_lines_with_ghosts(self):
        """Formats either support ghost parents or raise NotImplementedError.

        Some versioned file formats allow lines to be added with parent
        information that is > than that in the format.  Formats that do not
        support this need to raise NotImplementedError on the
        add_lines_with_ghosts api.
        """
        vf = self.get_file()
        # add a revision with ghost parents.
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError,
                vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct
        # the results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse': (parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
1594.2.8 by Robert Collins
add ghost aware apis to knits.
808
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
809
    def test_add_lines_with_ghosts_after_normal_revs(self):
        """Ghost parents added after normal revisions annotate correctly.

        Some versioned file formats allow lines to be added with parent
        information that is > than that in the format.  Formats that do not
        support this need to raise NotImplementedError on the
        add_lines_with_ghosts api.
        """
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(('base', 'line\n'), origins[0])
        self.assertEqual(('base', 'line_b\n'), origins[1])
        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
1594.2.23 by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files.
827
828
    def test_readonly_mode(self):
829
        transport = get_transport(self.get_url('.'))
830
        factory = self.get_factory()
831
        vf = factory('id', transport, 0777, create=True, access_mode='w')
832
        vf = factory('id', transport, access_mode='r')
833
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
834
        self.assertRaises(errors.ReadOnlyError,
835
                          vf.add_lines_with_ghosts,
836
                          'base',
837
                          [],
838
                          [])
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
839
3316.2.9 by Robert Collins
* ``VersionedFile.get_sha1`` is deprecated, please use
840
    def test_get_sha1s(self):
        """get_sha1s returns the text sha1 for each requested version."""
        vf = self.get_file()
        vf.add_lines('a', [], ['a\n'])      # a simple file
        vf.add_lines('b', ['a'], ['a\n'])   # same text, different metadata
        vf.add_lines('c', [], ['a'])        # differs only in the last newline
        expected = {
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            }
        self.assertEqual(expected, vf.get_sha1s(['a', 'c', 'b']))
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
855
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
856
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
857
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
    """Run the shared VersionedFile interface tests against WeaveFile."""

    def get_file(self, name='foo'):
        """Return a new WeaveFile created on this test's transport."""
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        """Return a weave whose stored text (but not its sha1s) is corrupted."""
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        # Still consistent before we corrupt it.
        w.check()

        # Corrupted: case change in the v2 insert block, so the text no
        # longer matches the recorded sha1.
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        """Return a weave whose text is intact but whose sha1 record is wrong."""
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] =  'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        """Reopen the named weave from the transport."""
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        """Opening a missing weave without create=True raises NoSuchFile."""
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        """The versioned-file class under test."""
        return WeaveFile
911
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
912
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
913
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
    """Tests for the _PlanMergeVersionedFile adapter.

    The adapter layers a writable in-memory store over two fallback
    knit-backed stores (vf1 and vf2).
    """

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        # Two independent fallback stores on separate transports.
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        """add_lines accepts only ':'-suffixed ids and real list arguments."""
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        """Build A->B in vf1, C->D in vf2, and local E: merging B and D."""
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        """get_parent_map consults both fallbacks and the local additions."""
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        # Unknown keys are simply absent from the result, not an error.
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'),('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        """get_record_stream serves fallback and local texts; misses are 'absent'."""
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)
1666.1.1 by Robert Collins
Add trivial http-using test for versioned files.
968
969
970
class TestReadonlyHttpMixin(object):
    """Mixin verifying a versioned file is readable over readonly HTTP.

    Concrete test classes supply get_file() (a writable file on the local
    transport) and get_factory() (the class to open over HTTP).
    """

    def get_transaction(self):
        # A trivial always-valid scope token for get_scope-style factories.
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        # Bug fix: assert against the HTTP-backed file, not the local writable
        # one -- the point of this test is that the readonly transport sees
        # the data that was written.
        self.assertEqual(['1', '2'], readonly_vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
988
989
990
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP mixin tests against WeaveFile."""

    def get_file(self):
        """Create a weave named 'foo' on the writable transport."""
        transport = get_transport(self.get_url('.'))
        return WeaveFile('foo', transport, create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        """The versioned-file class under test."""
        return WeaveFile
998
999
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
1000
class MergeCasesMixin(object):
    """Shared weave-merge scenarios.

    Subclasses supply get_file() and log_contents(), plus may override
    overlappedInsertExpected with their merge algorithm's actual output.
    NOTE: conflict-marker lines below carry significant trailing whitespace
    (written as \\x20 where needed) -- do not strip it.
    """

    def doMerge(self, base, a, b, mp):
        """Merge line-lists a and b (both derived from base); expect lines mp.

        Each input list holds lines without terminators; '\\n' is appended to
        every line before the texts are stored.
        """
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
    # Subclasses override this with their merge's actual result for the
    # overlapped-insert case.
    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        """Merge texts a and b (both derived from base); compare to expected."""
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            int a() {}
=======
            int c() {}
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
1235
1236
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
1237
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the shared merge scenarios against WeaveFile."""

    def get_file(self, name='foo'):
        """Return a new WeaveFile on this test's transport."""
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        """Log a serialized dump of weave w for debugging."""
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # Weave merge reports the overlapped insert as a conflict rather than
    # collapsing it (see testOverlappedInsert in the mixin).
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1250
1251
1252
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
1253
1254
    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario per lookup combination we expect to use:
        # (source storage kind, requested kind, expected adapter class).
        expected_adapters = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source_kind, target_kind, adapter_class in expected_adapters:
            factory = versionedfile.adapter_registry.get(
                (source_kind, target_kind))
            # The registered factories take a single (possibly None) argument.
            self.assertIsInstance(factory(None), adapter_class)
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1275
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1276
    def get_knit(self, annotated=True):
        """Return a knit store mapped to a single constant 'knit' name.

        :param annotated: If True, create an annotated knit.
        """
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1280
1281
    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests.

        :param f: store holding the diamond of texts (see get_diamond_files).
        :param ft_adapter: adapter applied to the 'origin' fulltext record.
        :param delta_adapter: adapter applied to the 'merged' delta record.
        :return: (adapted fulltext bytes, adapted delta bytes) tuple.
        """
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data
1292
1293
    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits.

        Variant where the texts have no trailing EOL.
        """
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        # The adapted bytes are gzipped plain knit records; decompress to
        # compare against the expected record text.
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
1310
1311
    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        # The adapted bytes are gzipped plain knit records; decompress to
        # compare against the expected record text.
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
1328
1329
    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves).

        Variant where the texts have no trailing EOL.
        """
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        # The delta adapter had to fetch its compression base ('left').
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1344
1345
    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        # The delta adapter had to fetch its compression base ('left').
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1360
1361
    def test_unannotated_to_fulltext(self):
1362
        """Test adapting unannotated knits to full texts.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1363
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1364
        This is used for -> weaves, and for -> annotated knits.
1365
        """
1366
        # we need a full text, and a delta
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1367
        f = self.get_knit(annotated=False)
1368
        get_diamond_files(f, 1)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1369
        # Reconstructing a full text requires a backing versioned file, and it
1370
        # must have the base lines requested from it.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1371
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1372
        ft_data, delta_data = self.helpGetBytes(f,
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
1373
            _mod_knit.FTPlainToFullText(None),
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1374
            _mod_knit.DeltaPlainToFullText(logged_vf))
1375
        self.assertEqual('origin\n', ft_data)
1376
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1377
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
1378
            True)], logged_vf.calls)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1379
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1380
    def test_unannotated_to_fulltext_no_eol(self):
1381
        """Test adapting unannotated knits to full texts.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1382
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1383
        This is used for -> weaves, and for -> annotated knits.
1384
        """
1385
        # we need a full text, and a delta
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1386
        f = self.get_knit(annotated=False)
1387
        get_diamond_files(f, 1, trailing_eol=False)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1388
        # Reconstructing a full text requires a backing versioned file, and it
1389
        # must have the base lines requested from it.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1390
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1391
        ft_data, delta_data = self.helpGetBytes(f,
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
1392
            _mod_knit.FTPlainToFullText(None),
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1393
            _mod_knit.DeltaPlainToFullText(logged_vf))
1394
        self.assertEqual('origin', ft_data)
1395
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1396
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
1397
            True)], logged_vf.calls)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1398
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
1399
1400
class TestKeyMapper(TestCaseWithMemoryTransport):
1401
    """Tests for various key mapping logic."""
1402
1403
    def test_identity_mapper(self):
1404
        mapper = versionedfile.ConstantMapper("inventory")
1405
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
1406
        self.assertEqual("inventory", mapper.map(('quux',)))
1407
1408
    def test_prefix_mapper(self):
1409
        #format5: plain
1410
        mapper = versionedfile.PrefixMapper()
1411
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
1412
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
1413
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
1414
        self.assertEqual(('new-id',), mapper.unmap("new-id"))
1415
1416
    def test_hash_prefix_mapper(self):
1417
        #format6: hash + plain
1418
        mapper = versionedfile.HashPrefixMapper()
1419
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
1420
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
1421
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
1422
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
1423
1424
    def test_hash_escaped_mapper(self):
1425
        #knit1: hash + escaped
1426
        mapper = versionedfile.HashEscapedPrefixMapper()
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1427
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
1428
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
1429
            "revision-id")))
1430
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
1431
            "revision-id")))
1432
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
1433
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1434
1435
1436
class TestVersionedFiles(TestCaseWithMemoryTransport):
1437
    """Tests for the multiple-file variant of VersionedFile."""
1438
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1439
    def get_versionedfiles(self, relpath='files'):
1440
        transport = self.get_transport(relpath)
1441
        if relpath != '.':
1442
            transport.mkdir('.')
1443
        files = self.factory(transport)
1444
        if self.cleanup is not None:
1445
            self.addCleanup(lambda:self.cleanup(files))
1446
        return files
1447
1448
    def test_annotate(self):
1449
        files = self.get_versionedfiles()
1450
        self.get_diamond_files(files)
1451
        if self.key_length == 1:
1452
            prefix = ()
1453
        else:
1454
            prefix = ('FileA',)
1455
        # introduced full text
1456
        origins = files.annotate(prefix + ('origin',))
1457
        self.assertEqual([
1458
            (prefix + ('origin',), 'origin\n')],
1459
            origins)
1460
        # a delta
1461
        origins = files.annotate(prefix + ('base',))
1462
        self.assertEqual([
1463
            (prefix + ('base',), 'base\n')],
1464
            origins)
1465
        # a merge
1466
        origins = files.annotate(prefix + ('merged',))
1467
        if self.graph:
1468
            self.assertEqual([
1469
                (prefix + ('base',), 'base\n'),
1470
                (prefix + ('left',), 'left\n'),
1471
                (prefix + ('right',), 'right\n'),
1472
                (prefix + ('merged',), 'merged\n')
1473
                ],
1474
                origins)
1475
        else:
1476
            # Without a graph everything is new.
1477
            self.assertEqual([
1478
                (prefix + ('merged',), 'base\n'),
1479
                (prefix + ('merged',), 'left\n'),
1480
                (prefix + ('merged',), 'right\n'),
1481
                (prefix + ('merged',), 'merged\n')
1482
                ],
1483
                origins)
1484
        self.assertRaises(RevisionNotPresent,
1485
            files.annotate, prefix + ('missing-key',))
1486
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1487
    def test_construct(self):
1488
        """Each parameterised test can be constructed on a transport."""
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1489
        files = self.get_versionedfiles()
1490
1491
    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
1492
        return get_diamond_files(files, self.key_length,
1493
            trailing_eol=trailing_eol, nograph=not self.graph,
1494
            left_only=left_only)
1495
4119.1.1 by John Arbash Meinel
Move the 'add_lines_nostoresha' to being tested against all VF implementations.
1496
    def test_add_lines_nostoresha(self):
1497
        """When nostore_sha is supplied using old content raises."""
1498
        vf = self.get_versionedfiles()
1499
        empty_text = ('a', [])
1500
        sample_text_nl = ('b', ["foo\n", "bar\n"])
1501
        sample_text_no_nl = ('c', ["foo\n", "bar"])
1502
        shas = []
1503
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
1504
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
1505
            shas.append(sha)
1506
        # we now have a copy of all the lines in the vf.
1507
        for sha, (version, lines) in zip(
1508
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
1509
            new_key = self.get_simple_key(version + "2")
1510
            self.assertRaises(errors.ExistingContent,
1511
                vf.add_lines, new_key, [], lines,
1512
                nostore_sha=sha)
1513
            # and no new version should have been added.
1514
            record = vf.get_record_stream([new_key], 'unordered', True).next()
1515
            self.assertEqual('absent', record.storage_kind)
1516
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1517
    def test_add_lines_return(self):
1518
        files = self.get_versionedfiles()
1519
        # save code by using the stock data insertion helper.
1520
        adds = self.get_diamond_files(files)
1521
        results = []
1522
        # We can only validate the first 2 elements returned from add_lines.
1523
        for add in adds:
1524
            self.assertEqual(3, len(add))
1525
            results.append(add[:2])
1526
        if self.key_length == 1:
1527
            self.assertEqual([
1528
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1529
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1530
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1531
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1532
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1533
                results)
1534
        elif self.key_length == 2:
1535
            self.assertEqual([
1536
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1537
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1538
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1539
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1540
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1541
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1542
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1543
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1544
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1545
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1546
                results)
1547
1548
    def test_empty_lines(self):
1549
        """Empty files can be stored."""
1550
        f = self.get_versionedfiles()
1551
        key_a = self.get_simple_key('a')
1552
        f.add_lines(key_a, [], [])
1553
        self.assertEqual('',
1554
            f.get_record_stream([key_a], 'unordered', True
1555
                ).next().get_bytes_as('fulltext'))
1556
        key_b = self.get_simple_key('b')
1557
        f.add_lines(key_b, self.get_parents([key_a]), [])
1558
        self.assertEqual('',
1559
            f.get_record_stream([key_b], 'unordered', True
1560
                ).next().get_bytes_as('fulltext'))
1561
1562
    def test_newline_only(self):
1563
        f = self.get_versionedfiles()
1564
        key_a = self.get_simple_key('a')
1565
        f.add_lines(key_a, [], ['\n'])
1566
        self.assertEqual('\n',
1567
            f.get_record_stream([key_a], 'unordered', True
1568
                ).next().get_bytes_as('fulltext'))
1569
        key_b = self.get_simple_key('b')
1570
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
1571
        self.assertEqual('\n',
1572
            f.get_record_stream([key_b], 'unordered', True
1573
                ).next().get_bytes_as('fulltext'))
1574
1575
    def test_get_record_stream_empty(self):
1576
        """An empty stream can be requested without error."""
1577
        f = self.get_versionedfiles()
1578
        entries = f.get_record_stream([], 'unordered', False)
1579
        self.assertEqual([], list(entries))
1580
1581
    def assertValidStorageKind(self, storage_kind):
1582
        """Assert that storage_kind is a valid storage_kind."""
1583
        self.assertSubset([storage_kind],
1584
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1585
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
1586
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
4005.3.6 by Robert Collins
Support delta_closure=True with NetworkRecordStream to transmit deltas over the wire when full text extraction is required on the far end.
1587
             'knit-delta-gz',
1588
             'knit-delta-closure', 'knit-delta-closure-ref'])
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1589
1590
    def capture_stream(self, f, entries, on_seen, parents):
1591
        """Capture a stream for testing."""
1592
        for factory in entries:
1593
            on_seen(factory.key)
1594
            self.assertValidStorageKind(factory.storage_kind)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1595
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
1596
                factory.sha1)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1597
            self.assertEqual(parents[factory.key], factory.parents)
1598
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1599
                str)
1600
1601
    def test_get_record_stream_interface(self):
1602
        """each item in a stream has to provide a regular interface."""
1603
        files = self.get_versionedfiles()
1604
        self.get_diamond_files(files)
1605
        keys, _ = self.get_keys_and_sort_order()
1606
        parent_map = files.get_parent_map(keys)
1607
        entries = files.get_record_stream(keys, 'unordered', False)
1608
        seen = set()
1609
        self.capture_stream(files, entries, seen.add, parent_map)
1610
        self.assertEqual(set(keys), seen)
1611
1612
    def get_simple_key(self, suffix):
1613
        """Return a key for the object under test."""
1614
        if self.key_length == 1:
1615
            return (suffix,)
1616
        else:
1617
            return ('FileA',) + (suffix,)
1618
1619
    def get_keys_and_sort_order(self):
1620
        """Get diamond test keys list, and their sort ordering."""
1621
        if self.key_length == 1:
1622
            keys = [('merged',), ('left',), ('right',), ('base',)]
1623
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
1624
        else:
1625
            keys = [
1626
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1627
                ('FileA', 'base'),
1628
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1629
                ('FileB', 'base'),
1630
                ]
1631
            sort_order = {
1632
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
1633
                ('FileA', 'base'):0,
1634
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
1635
                ('FileB', 'base'):0,
1636
                }
1637
        return keys, sort_order
1638
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1639
    def get_keys_and_groupcompress_sort_order(self):
1640
        """Get diamond test keys list, and their groupcompress sort ordering."""
1641
        if self.key_length == 1:
1642
            keys = [('merged',), ('left',), ('right',), ('base',)]
1643
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
1644
        else:
1645
            keys = [
1646
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1647
                ('FileA', 'base'),
1648
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1649
                ('FileB', 'base'),
1650
                ]
1651
            sort_order = {
1652
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
1653
                ('FileA', 'base'):2,
1654
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
1655
                ('FileB', 'base'):5,
1656
                }
1657
        return keys, sort_order
1658
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1659
    def test_get_record_stream_interface_ordered(self):
1660
        """each item in a stream has to provide a regular interface."""
1661
        files = self.get_versionedfiles()
1662
        self.get_diamond_files(files)
1663
        keys, sort_order = self.get_keys_and_sort_order()
1664
        parent_map = files.get_parent_map(keys)
1665
        entries = files.get_record_stream(keys, 'topological', False)
1666
        seen = []
1667
        self.capture_stream(files, entries, seen.append, parent_map)
1668
        self.assertStreamOrder(sort_order, seen, keys)
1669
1670
    def test_get_record_stream_interface_ordered_with_delta_closure(self):
1671
        """each item must be accessible as a fulltext."""
1672
        files = self.get_versionedfiles()
1673
        self.get_diamond_files(files)
1674
        keys, sort_order = self.get_keys_and_sort_order()
1675
        parent_map = files.get_parent_map(keys)
1676
        entries = files.get_record_stream(keys, 'topological', True)
1677
        seen = []
1678
        for factory in entries:
1679
            seen.append(factory.key)
1680
            self.assertValidStorageKind(factory.storage_kind)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1681
            self.assertSubset([factory.sha1],
1682
                [None, files.get_sha1s([factory.key])[factory.key]])
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1683
            self.assertEqual(parent_map[factory.key], factory.parents)
1684
            # self.assertEqual(files.get_text(factory.key),
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1685
            ft_bytes = factory.get_bytes_as('fulltext')
1686
            self.assertIsInstance(ft_bytes, str)
1687
            chunked_bytes = factory.get_bytes_as('chunked')
1688
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
1689
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1690
        self.assertStreamOrder(sort_order, seen, keys)
1691
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1692
    def test_get_record_stream_interface_groupcompress(self):
1693
        """each item in a stream has to provide a regular interface."""
1694
        files = self.get_versionedfiles()
1695
        self.get_diamond_files(files)
1696
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
1697
        parent_map = files.get_parent_map(keys)
1698
        entries = files.get_record_stream(keys, 'groupcompress', False)
1699
        seen = []
1700
        self.capture_stream(files, entries, seen.append, parent_map)
1701
        self.assertStreamOrder(sort_order, seen, keys)
1702
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1703
    def assertStreamOrder(self, sort_order, seen, keys):
1704
        self.assertEqual(len(set(seen)), len(keys))
1705
        if self.key_length == 1:
1706
            lows = {():0}
1707
        else:
1708
            lows = {('FileA',):0, ('FileB',):0}
1709
        if not self.graph:
1710
            self.assertEqual(set(keys), set(seen))
1711
        else:
1712
            for key in seen:
1713
                sort_pos = sort_order[key]
1714
                self.assertTrue(sort_pos >= lows[key[:-1]],
1715
                    "Out of order in sorted stream: %r, %r" % (key, seen))
1716
                lows[key[:-1]] = sort_pos
1717
1718
    def test_get_record_stream_unknown_storage_kind_raises(self):
1719
        """Asking for a storage kind that the stream cannot supply raises."""
1720
        files = self.get_versionedfiles()
1721
        self.get_diamond_files(files)
1722
        if self.key_length == 1:
1723
            keys = [('merged',), ('left',), ('right',), ('base',)]
1724
        else:
1725
            keys = [
1726
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1727
                ('FileA', 'base'),
1728
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1729
                ('FileB', 'base'),
1730
                ]
1731
        parent_map = files.get_parent_map(keys)
1732
        entries = files.get_record_stream(keys, 'unordered', False)
1733
        # We track the contents because we should be able to try, fail a
1734
        # particular kind and then ask for one that works and continue.
1735
        seen = set()
1736
        for factory in entries:
1737
            seen.add(factory.key)
1738
            self.assertValidStorageKind(factory.storage_kind)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1739
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
1740
                factory.sha1)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1741
            self.assertEqual(parent_map[factory.key], factory.parents)
1742
            # currently no stream emits mpdiff
1743
            self.assertRaises(errors.UnavailableRepresentation,
1744
                factory.get_bytes_as, 'mpdiff')
1745
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1746
                str)
1747
        self.assertEqual(set(keys), seen)
1748
1749
    def test_get_record_stream_missing_records_are_absent(self):
1750
        files = self.get_versionedfiles()
1751
        self.get_diamond_files(files)
1752
        if self.key_length == 1:
1753
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
1754
        else:
1755
            keys = [
1756
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1757
                ('FileA', 'absent'), ('FileA', 'base'),
1758
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1759
                ('FileB', 'absent'), ('FileB', 'base'),
1760
                ('absent', 'absent'),
1761
                ]
1762
        parent_map = files.get_parent_map(keys)
1763
        entries = files.get_record_stream(keys, 'unordered', False)
1764
        self.assertAbsentRecord(files, keys, parent_map, entries)
1765
        entries = files.get_record_stream(keys, 'topological', False)
1766
        self.assertAbsentRecord(files, keys, parent_map, entries)
1767
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1768
    def assertRecordHasContent(self, record, bytes):
1769
        """Assert that record has the bytes bytes."""
1770
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
4005.3.7 by Robert Collins
Review feedback.
1771
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1772
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1773
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
1774
        files = self.get_versionedfiles()
1775
        key = self.get_simple_key('foo')
1776
        files.add_lines(key, (), ['my text\n', 'content'])
1777
        stream = files.get_record_stream([key], 'unordered', False)
1778
        record = stream.next()
1779
        if record.storage_kind in ('chunked', 'fulltext'):
1780
            # chunked and fulltext representations are for direct use not wire
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1781
            # serialisation: check they are able to be used directly. To send
1782
            # such records over the wire translation will be needed.
1783
            self.assertRecordHasContent(record, "my text\ncontent")
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1784
        else:
1785
            bytes = [record.get_bytes_as(record.storage_kind)]
1786
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
1787
            source_record = record
1788
            records = []
1789
            for record in network_stream:
1790
                records.append(record)
1791
                self.assertEqual(source_record.storage_kind,
1792
                    record.storage_kind)
1793
                self.assertEqual(source_record.parents, record.parents)
1794
                self.assertEqual(
1795
                    source_record.get_bytes_as(source_record.storage_kind),
1796
                    record.get_bytes_as(record.storage_kind))
1797
            self.assertEqual(1, len(records))
1798
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
1799
    def assertStreamMetaEqual(self, records, expected, stream):
1800
        """Assert that streams expected and stream have the same records.
4032.1.2 by John Arbash Meinel
Track down a few more files that have trailing whitespace.
1801
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
1802
        :param records: A list to collect the seen records.
1803
        :return: A generator of the records in stream.
1804
        """
1805
        # We make assertions during copying to catch things early for
1806
        # easier debugging.
1807
        for record, ref_record in izip(stream, expected):
1808
            records.append(record)
1809
            self.assertEqual(ref_record.key, record.key)
1810
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
1811
            self.assertEqual(ref_record.parents, record.parents)
1812
            yield record
1813
1814
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
1815
        stream):
1816
        """Convert a stream to a bytes iterator.
1817
1818
        :param skipped_records: A list with one element to increment when a
1819
            record is skipped.
4032.1.2 by John Arbash Meinel
Track down a few more files that have trailing whitespace.
1820
        :param full_texts: A dict from key->fulltext representation, for
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
1821
            checking chunked or fulltext stored records.
1822
        :param stream: A record_stream.
1823
        :return: An iterator over the bytes of each record.
1824
        """
1825
        for record in stream:
1826
            if record.storage_kind in ('chunked', 'fulltext'):
1827
                skipped_records[0] += 1
1828
                # check the content is correct for direct use.
1829
                self.assertRecordHasContent(record, full_texts[record.key])
1830
            else:
1831
                yield record.get_bytes_as(record.storage_kind)
1832
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1833
    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        """A fulltext-plus-delta native stream round-trips over the wire."""
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        delta_parents = ()
        if self.graph:
            delta_parents = (key,)
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Two independent streams over the same keys: one is serialised to
        # bytes, the other kept back as the reference for metadata checks.
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # Each record must appear exactly once on the wire - no duplicates.
        self.assertEqual(2, len(records) + skipped_records[0])
        if records:
            # If any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)
1865
1866
    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        """A lone delta record can be copied over the wire."""
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        delta_parents = ()
        if self.graph:
            delta_parents = (key,)
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across first so the delta can be reconstructed
        # when it is inserted into target.
        basis_stream = files.get_record_stream([key], 'unordered', False)
        target_files.insert_record_stream(basis_stream)
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # Exactly one record travels the wire - no duplicates.
        self.assertEqual(1, len(records) + skipped_records[0])
        if records:
            # If any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)
1902
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
1903
    def test_get_record_stream_wire_ready_delta_closure_included(self):
        """A delta sent with its closure yields its full text after the wire."""
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        delta_parents = ()
        if self.graph:
            delta_parents = (key,)
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Walk the reconstructed stream, checking each record's metadata
        # against the reference stream and that it can expand to a fulltext.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            self.assertRecordHasContent(record, full_texts[record.key])
        # Exactly one record travels the wire - no duplicates.
        self.assertEqual(1, len(records) + skipped_records[0])
1932
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1933
    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for record in entries:
            seen.add(record.key)
            if record.key[-1] == 'absent':
                # Missing keys surface as 'absent' factories carrying no data.
                self.assertEqual('absent', record.storage_kind)
                self.assertEqual(None, record.sha1)
                self.assertEqual(None, record.parents)
                continue
            # Present keys carry a valid kind, sha1, parents and bytes.
            self.assertValidStorageKind(record.storage_kind)
            self.assertEqual(files.get_sha1s([record.key])[record.key],
                record.sha1)
            self.assertEqual(parents[record.key], record.parents)
            self.assertIsInstance(record.get_bytes_as(record.storage_kind),
                str)
        # Every requested key must have been yielded, absent or not.
        self.assertEqual(set(keys), seen)
1950
1951
    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        present_keys = list(keys)
        # Splice an absent key into the middle of the present ones.  (We do
        # not request only absent keys, so that content both before and after
        # the absent key must still be delivered.)
        keys.insert(2, ('extra',) * self.key_length)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        # Only the present keys come through the filter.
        self.assertEqual(set(present_keys), seen)
1970
1971
    def get_mapper(self):
        """Return a key mapper matching this scenario's key length."""
        if self.key_length == 1:
            # Single-element keys all map to one constant store name.
            return ConstantMapper('source')
        return HashEscapedPrefixMapper()
1977
1978
    def get_parents(self, parents):
        """Return *parents*, or None when this scenario records no graph."""
        if not self.graph:
            return None
        return parents
1984
1985
    def test_get_parent_map(self):
        """get_parent_map answers singles, batches, and ignores absences."""
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # Each key must be queryable immediately after it is added.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # An empty query gives an empty answer.
        self.assertEqual({}, files.get_parent_map([]))
        # Many keys can be queried at once.
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are simply left out of the result.
        keys = list(all_parents.keys())
        keys.insert(1, ('missing',) * self.key_length)
        self.assertEqual(all_parents, files.get_parent_map(keys))
2021
2022
    def test_get_sha1s(self):
        """get_sha1s returns the known sha1 for every requested key."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # Ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        # One expected sha1 per key above, in the same order.
        expected_sha1s = [
            '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            '00e364d235126be43292ab09cb4686cf703ddc17',
            'a8478686da38e370e32e42e8a0c220e33ee9132f',
            'ed8bce375198ea62444dc71952b22cfc2b09226d',
            '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            ]
        self.assertEqual(dict(zip(keys, expected_sha1s)),
            files.get_sha1s(keys))
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
2041
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2042
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        self.get_versionedfiles().insert_record_stream([])
2046
2047
    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that ``expected`` and ``actual`` hold identical content."""
        def fulltext_of(vf, key):
            # Pull the single requested record and expand it to a fulltext.
            stream = vf.get_record_stream([key], 'unordered', True)
            return stream.next().get_bytes_as('fulltext')
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if not self.graph:
            # Graph-less stores always report None for parents.
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        else:
            self.assertEqual(
                actual_parents, expected.get_parent_map(expected.keys()))
        for key in actual.keys():
            self.assertEqual(fulltext_of(actual, key),
                fulltext_of(expected, key))
2062
2063
    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        # Use the default trailing eol here: previously this test passed
        # trailing_eol=False, making it an exact duplicate of
        # test_insert_record_stream_fulltexts_noeol and leaving the ordinary
        # eol-terminated fulltext case untested.
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2077
2078
    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # Weaves always emit fulltext records.
        source_factory = make_versioned_files_factory(
            WeaveFile, self.get_mapper())
        source = source_factory(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2092
2093
    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # Annotated knit source.
        source = make_file_factory(True, self.get_mapper())(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2105
2106
    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # Annotated knit source, texts without trailing eol.
        source = make_file_factory(True, self.get_mapper())(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2118
2119
    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # Unannotated knit source.
        source = make_file_factory(False, self.get_mapper())(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2131
2132
    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # Unannotated knit source, texts without trailing eol.
        source = make_file_factory(False, self.get_mapper())(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2144
2145
    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # Pre-populate files with a subset of the source's keys so the
        # inserted stream overlaps existing content.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2156
2157
    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        missing_key = ('missing',) * self.key_length
        stream = source.get_record_stream([missing_key], 'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)
2165
2166
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        # Children (end_keys) are deliberately streamed before their parents
        # (start_keys).
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        # Qualify the exception with the errors module, consistent with every
        # other use of RevisionNotPresent in this file (the bare name here
        # would NameError if it were not separately imported).
        except errors.RevisionNotPresent:
            # Rejecting the stream is allowed, but must not have corrupted
            # the file.
            files.check()
        else:
            # Accepting it must have produced a complete copy.
            self.assertIdenticalVersionedFile(source, files)
2192
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
2193
    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # An unannotated knit store backed by a scenario-appropriate mapper.
        source = make_file_factory(False, self.get_mapper())(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source
2204
2205
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if not self.support_partial_insertion:
            # Without partial insertion the stream must be rejected outright,
            # leaving the store valid.
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()
            return
        # With partial insertion: nothing is missing beforehand...
        self.assertEqual([],
            list(files.get_missing_compression_parent_keys()))
        files.insert_record_stream(entries)
        # ...and afterwards exactly the absent basis is reported missing,
        # while both streamed keys are present in the graph.
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        self.assertEqual(set(keys), set(files.get_parent_map(keys)))
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2226
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
2227
    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        origin_key = self.get_simple_key('origin')
        merged_key = self.get_simple_key('merged')
        files = self.get_versionedfiles()
        files.insert_record_stream(source.get_record_stream(
            [origin_key, merged_key], 'unordered', False))
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Supply the full delta closure of the missing basis records.
        files.insert_record_stream(source.get_record_stream(
            missing_bases, 'unordered', True))
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
2259
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2260
    def test_iter_lines_added_or_present_in_keys(self):
        # Check that we get at least an equal set of the lines added by
        # versions in the store.  The versions form a small tree, so that
        # dumb searches have more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):
            # Records every update() call so the walk can be verified.

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # A base version to be included.
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # An ancestor to be included on one side.
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # An ancestor to be included on the other side.
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # A child of rancestor with no eofile-nl.
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # A child of lancestor and base, joining the two roots.
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def count_lines_for(keys, expected_updates):
            # Walk the requested keys, returning {line: occurrence count} and
            # checking any progress updates against the expected sequence.
            counts = {}
            pb = InstrumentedProgress()
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=pb):
                counts[line] = counts.get(line, 0) + 1
            if pb.updates != []:
                self.assertEqual(expected_updates, pb.updates)
            return counts

        lines = count_lines_for(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', step, 2) for step in range(3)])
        # We must see child and otherchild; more than that is acceptable.
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

        # Walking every key must surface every line at least once.
        lines = count_lines_for(files.keys(),
            [('Walking content', step, 5) for step in range(6)])
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
2333
2334
    def test_make_mpdiffs(self):
        """Multi-parent diffs round-trip through add_mpdiffs unchanged."""
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])

        def add_chain(text_name):
            # Build a 26-deep linear chain of growing texts rooted at 'base';
            # this was previously written out twice as identical loops.
            next_parent = self.get_simple_key('base')
            text = ['line\n']
            for depth in range(26):
                new_version = self.get_simple_key(text_name + '%s' % depth)
                text = text + ['line\n']
                files.add_lines(new_version, self.get_parents([next_parent]),
                    text)
                next_parent = new_version

        add_chain('chain1-')
        add_chain('chain2-')
        # NOTE: an unused 26-entry table of expected sha1s for the chain texts
        # used to live here; it was dead code and has been removed.
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            # Applying the mpdiff into target must reproduce the source
            # fulltext exactly.
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )
2426
2427
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        # An empty store reports no keys.
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        # After one add, exactly that key is reported.
        self.assertEqual(set([key]), set(files.keys()))
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
2438
2439
2440
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        # Serve parent lookups from the in-memory map, silently dropping
        # any key with no recorded parents (get_parent_map semantics).
        return dict((k, self._parent_map[k]) for k in keys
                    if k in self._parent_map)

    def setUp(self):
        TestCase.setUp(self)
        # Backing stores read by the virtual VF via its two callbacks.
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        # The virtual implementation is read-only.
        self.assertRaises(NotImplementedError,
                self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        # The virtual implementation is read-only.
        self.assertRaises(NotImplementedError,
                self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        # Another mutating operation that must be refused.
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        # Unknown keys are simply omitted from the result.
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                           self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        # Plain revision ids are widened to 1-tuples in the returned map;
        # the absent key ("L",) is dropped.
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        record = next(stream)
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        # A key with no stored lines yields an 'absent' record.
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        record = next(stream)
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        # Only lines for the requested keys are yielded; "C" is excluded.
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
            sorted(list(it)))
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):
2508
2509
    def get_ordering_vf(self, key_priority):
        """Build a 4-revision branch and wrap its inventories VF.

        :param key_priority: priority map handed to the decorator.
        :return: an OrderingVersionedFilesDecorator over the branch's
            inventories versioned files.
        """
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        # Linear history A -> B -> C -> D with no content changes.
        for revid, parent in [('B', 'A'), ('C', 'B'), ('D', 'C')]:
            builder.build_snapshot(revid, [parent], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        return versionedfile.OrderingVersionedFilesDecorator(
            b.repository.inventories, key_priority)
    def test_get_empty(self):
        # A freshly built decorator has recorded no calls yet.
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)
    def test_get_record_stream_topological(self):
        # 'topological' ordering ignores the priority map entirely.
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'topological', False)
        keys = [record.key for record in stream]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)
    def test_get_record_stream_ordered(self):
        # 'unordered' requests come back sorted by ascending priority.
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        keys = [record.key for record in stream]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
    def test_get_record_stream_implicit_order(self):
        # Keys missing from the priority map sort ahead of prioritised
        # ones, ordered among themselves alphabetically.
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        keys = [record.key for record in stream]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)