/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
4103.3.4 by Martin Pool
Update test that depends on progress bar strings
1
# Copyright (C) 2005, 2009 Canonical Ltd
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
2
#
3
# Authors:
4
#   Johan Rydberg <jrydberg@gnu.org>
5
#
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
10
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
1887.1.1 by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines,
15
#
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20
1704.2.15 by Martin Pool
Remove TODO about knit testing printed from test suite
21
# TODO: might be nice to create a versionedfile with some type of corruption
22
# considered typical and check that it can be detected/corrected.
23
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
24
from itertools import chain, izip
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
25
from StringIO import StringIO
26
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
27
import bzrlib
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
28
from bzrlib import (
29
    errors,
2309.4.7 by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode,
30
    osutils,
2039.1.1 by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000)
31
    progress,
32
    )
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
33
from bzrlib.errors import (
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
34
                           RevisionNotPresent,
1563.2.11 by Robert Collins
Consolidate reweave and join as we have no separate usage, make reweave tests apply to all versionedfile implementations and deprecate the old reweave apis.
35
                           RevisionAlreadyPresent,
36
                           WeaveParentMismatch
37
                           )
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
38
from bzrlib import knit as _mod_knit
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
39
from bzrlib.knit import (
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
40
    cleanup_pack_knit,
41
    make_file_factory,
42
    make_pack_factory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
43
    KnitAnnotateFactory,
2770.1.10 by Aaron Bentley
Merge bzr.dev
44
    KnitPlainFactory,
2770.1.1 by Aaron Bentley
Initial implmentation of plain knit annotation
45
    )
3350.3.14 by Robert Collins
Deprecate VersionedFile.join.
46
from bzrlib.symbol_versioning import one_four, one_five
3350.6.2 by Robert Collins
Prepare parameterised test environment.
47
from bzrlib.tests import (
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
48
    TestCase,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
49
    TestCaseWithMemoryTransport,
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
50
    TestNotApplicable,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
51
    TestSkipped,
52
    condition_isinstance,
53
    split_suite_by_condition,
4084.5.1 by Robert Collins
Bulk update all test adaptation into a single approach, using multiply_tests rather than test adapters.
54
    multiply_tests,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
55
    )
2929.3.8 by Vincent Ladeuil
Rename bzrlib/test/HTTPTestUtils.py to bzrlib/tests/http_utils.py and fix uses.
56
from bzrlib.tests.http_utils import TestCaseWithWebserver
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
57
from bzrlib.trace import mutter
1563.2.16 by Robert Collins
Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable.
58
from bzrlib.transport import get_transport
1563.2.13 by Robert Collins
InterVersionedFile implemented.
59
from bzrlib.transport.memory import MemoryTransport
1684.3.1 by Robert Collins
Fix versioned file joins with empty targets.
60
from bzrlib.tsort import topo_sort
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
61
from bzrlib.tuned_gzip import GzipFile
1563.2.12 by Robert Collins
Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.
62
import bzrlib.versionedfile as versionedfile
3350.6.2 by Robert Collins
Prepare parameterised test environment.
63
from bzrlib.versionedfile import (
64
    ConstantMapper,
65
    HashEscapedPrefixMapper,
66
    PrefixMapper,
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
67
    VirtualVersionedFiles,
3350.6.2 by Robert Collins
Prepare parameterised test environment.
68
    make_versioned_files_factory,
69
    )
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
70
from bzrlib.weave import WeaveFile
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
71
from bzrlib.weavefile import read_weave, write_weave
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
72
73
3350.6.2 by Robert Collins
Prepare parameterised test environment.
74
def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    # Scenarios whose stores use length-one keys.
    len_one_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    # Scenarios whose stores use length-two (prefixed) keys.
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': True,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
3350.6.2 by Robert Collins
Prepare parameterised test environment.
152
153
3350.3.11 by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error.
154
def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Insert a diamond revision graph into the versioned file *f*.

    The graph is: origin -> base -> (left, right) -> merged, which
    exercises deltas and merges.

    :param f: A VersionedFile-like object supporting add_lines.
    :param trailing_eol: If True, end the last line of each text with a
        newline.
    :param left_only: If True, only insert origin/base/left (skip the
        right and merged nodes).
    :return: A tuple of (f, parents) where parents maps each version name
        to its parent tuples.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # The final line of each text either carries a newline or not,
    # depending on trailing_eol.
    last_char = '\n' if trailing_eol else ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
180
181
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
182
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Insert a diamond revision graph into *files* to exercise deltas/merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        # Content-hash keyed stores cannot record a graph either.
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # The final line of each text either carries a newline or not.
    last_char = '\n' if trailing_eol else ''
    result = []
    def get_parents(suffix_list):
        # Graphless stores get no parents; otherwise qualify each parent
        # suffix with the current prefix.
        if nograph:
            return ()
        return [prefix + suffix for suffix in suffix_list]
    def get_key(suffix):
        # nokeys stores allocate their own keys from content hashes.
        if nokeys:
            return (None, )
        return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
245
246
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
247
class VersionedFileTestMixIn(object):
248
    """A mixin test class for testing VersionedFiles.
249
250
    This is not an adaptor-style test at this point because
251
    theres no dynamic substitution of versioned file implementations,
252
    they are strictly controlled by their owning repositories.
253
    """
254
3316.2.3 by Robert Collins
Remove manual notification of transaction finishing on versioned files.
255
    def get_transaction(self):
        """Return this test's transaction, lazily initialising it to None.

        The attribute is created on first access so later callers always
        see a well-defined value.
        """
        try:
            return self._transaction
        except AttributeError:
            self._transaction = None
            return self._transaction
259
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
260
    def test_add(self):
        """Added texts are listed, retrievable, and survive a reopen."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def check_file(vf):
            # Both versions are present and their content round-trips.
            versions = vf.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEqual(['a\n', 'b\n'], vf.get_lines('r0'))
            self.assertEqual('a\nb\n', vf.get_text('r0'))
            self.assertEqual(['b\n', 'c\n'], vf.get_lines('r1'))
            self.assertEqual(2, len(vf))
            self.assertEqual(2, vf.num_versions())
            # Unknown parents and duplicate version ids are rejected.
            self.assertRaises(RevisionNotPresent,
                vf.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                vf.add_lines, 'r1', [], [])
        check_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        check_file(f)
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
282
1596.2.32 by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility.
283
    def test_adds_with_parent_texts(self):
        """add_lines accepts and returns parent_texts for memoised adds."""
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        # Each add should have handed back a usable parent-text token.
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def check_file(vf):
            # All three versions are present with the right content.
            versions = vf.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEqual(['a\n', 'b\n'], vf.get_lines('r0'))
            self.assertEqual(['b\n', 'c\n'], vf.get_lines('r1'))
            self.assertEqual(['c\n', 'd\n'], vf.get_lines('r2'))
            self.assertEqual(3, vf.num_versions())
            # Annotation attributes each line to the version introducing it.
            origins = vf.annotate('r1')
            self.assertEqual('r0', origins[0][0])
            self.assertEqual('r1', origins[1][0])
            origins = vf.annotate('r2')
            self.assertEqual('r1', origins[0][0])
            self.assertEqual('r2', origins[1][0])

        check_file(f)
        f = self.reopen_file()
        check_file(f)
316
2805.6.7 by Robert Collins
Review feedback.
317
    def test_add_unicode_content(self):
        """Unicode content is rejected: versioned files store bytes only."""
        vf = self.get_file()
        mixed_lines = ['a\n', u'b\n', 'c\n']
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], mixed_lines)
        # Formats without ghost support may raise NotImplementedError first.
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], mixed_lines)
326
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
327
    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        triple_a = ['a\n', 'a\n', 'a\n']
        vf.add_lines('1', [], ['a\n'])
        # Two different matching-block hints must both reconstruct the
        # same final text.
        vf.add_lines('2', ['1'], triple_a,
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(triple_a, vf.get_lines('2'))
        vf.add_lines('3', ['1'], triple_a,
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(triple_a, vf.get_lines('3'))
2520.4.150 by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines
343
2805.6.7 by Robert Collins
Review feedback.
344
    def test_inline_newline_throws(self):
        """Lines with an embedded newline are rejected; inline CRs are not."""
        vf = self.get_file()
        # 'a\n\n' is one "line" containing an embedded newline - forbidden.
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            # Ghost support is optional.
            pass
358
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
359
    def test_add_reserved(self):
        """Reserved revision ids (ending in ':') may not be added."""
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
363
2794.1.1 by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit.
364
    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        samples = [
            ('a', []),                      # empty text
            ('b', ["foo\n", "bar\n"]),      # trailing newline
            ('c', ["foo\n", "bar"]),        # no trailing newline
            ]
        shas = [vf.add_lines(version, [], lines)[0]
                for version, lines in samples]
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(shas, samples):
            # Re-adding identical content with nostore_sha must raise ...
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # ... and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
383
2803.1.1 by Robert Collins
Fix typo in ghosts version of test_add_lines_nostoresha.
384
    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        samples = [
            ('a', []),                      # empty text
            ('b', ["foo\n", "bar\n"]),      # trailing newline
            ('c', ["foo\n", "bar"]),        # no trailing newline
            ]
        shas = [vf.add_lines(version, [], lines)[0]
                for version, lines in samples]
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(shas, samples):
            # Re-adding identical content with nostore_sha must raise ...
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # ... and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")
408
2776.1.1 by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed
409
    def test_add_lines_return_value(self):
        """add_lines returns the sha1 and the text size of the added text."""
        vf = self.get_file()
        texts = [('a', []),
                 ('b', ["foo\n", "bar\n"]),
                 ('c', ["foo\n", "bar"])]
        # check results for the three cases:
        for version, lines in texts:
            # The first two elements are common to all versioned files: the
            # digest and the size of the text.  Some implementations return
            # additional data in further tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual(
                (osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # supplying parents should not affect the digest or the size:
        lines = ["foo\n", "bar\n"]
        self.assertEqual(
            (osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
428
2229.2.1 by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository
429
    def test_get_reserved(self):
        """Reserved revision ids are refused by the retrieval apis."""
        f = self.get_file()
        self.assertRaises(errors.ReservedId, f.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, f.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, f.get_text, 'b:')
434
3468.2.4 by Martin Pool
Test and fix #234748 problems in trailing newline diffs
435
    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Try this over a range of chain lengths: the VersionedFile interface
        # cannot force a particular chain length, so we grow the chain one
        # step at a time from a small base.  20 is an arbitrary figure -
        # knits cap delta chains at 200, but ideally we would have some way
        # of tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            parents = []
            for step in range(length):
                version = 'step-%d' % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
459
460
    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        # alternate eol / no-eol texts down a four-deep chain
        for i in range(4):
            version = 'v%d' % i
            if i % 2 == 0:
                lines = sample_text_no_nl
            else:
                lines = sample_text_nl
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))
488
3460.2.1 by Robert Collins
* Inserting a bundle which changes the contents of a file with no trailing
489
    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        # (The hand-verified sha1 of the text being added is
        # 6a1d115ec7b60afb664dc14890b5af5ce3c827a4.)
        # Create a second text which adds a new line before the trailing
        # no-eol line, reusing that last line unaltered (which can cause
        # annotation reuse).  Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
510
2520.4.85 by Aaron Bentley
Get all test passing (which just proves there aren't enough tests!)
511
    def test_make_mpdiffs(self):
        """Each version round-trips through make_mpdiffs/add_mpdiffs."""
        from bzrlib import multiparent
        source = self.get_file('foo')
        self._setup_for_deltas(source)
        target = self.get_file('bar')
        for version in multiparent.topo_iter(source):
            mpdiff = source.make_mpdiffs([version])[0]
            parents = source.get_parent_map([version])[version]
            sha1 = source.get_sha1s([version])[version]
            target.add_mpdiffs([(version, parents, sha1, mpdiff)])
            # the reconstructed text must match the original exactly
            self.assertEqualDiff(source.get_text(version),
                                 target.get_text(version))
522
3453.3.2 by John Arbash Meinel
Add a test case for the first loop, unable to find a way to trigger the second loop
523
    def test_make_mpdiffs_with_ghosts(self):
        """make_mpdiffs raises RevisionNotPresent for a ghost version."""
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, ['ghost'])
531
1596.2.38 by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready.
532
    def _setup_for_deltas(self, f):
        """Populate *f* with texts exercising delta chains and eol handling.

        Returns a dict mapping chain depth to the expected sha1 for the
        'chain1-' versions.
        """
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        # expected sha1 for each depth of the 'chain1-' chain:
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        # build two parallel 26-step chains, both rooted at 'base', each step
        # growing the text by one line (previously two copy-pasted loops).
        for text_name in ('chain1-', 'chain2-'):
            next_parent = 'base'
            text = ['line\n']
            for depth in range(26):
                new_version = text_name + '%s' % depth
                text = text + ['line\n']
                f.add_lines(new_version, [next_parent], text)
                next_parent = new_version
        return sha1s
1596.2.37 by Robert Collins
Switch to delta based content copying in the generic versioned file copier.
601
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
602
    def test_ancestry(self):
        """get_ancestry returns a topologically sorted ancestry."""
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        for version, parents in [('r1', ['r0']), ('r2', ['r0']),
                                 ('r3', ['r2']), ('rM', ['r1', 'r2'])]:
            f.add_lines(version, parents, ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # several orderings are valid:
        # r0 r1 r2 rM / r0 r2 r1 rM / etc
        # so only relative positions are checked, not the exact list.
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        # r3 is not an ancestor of rM and must be absent
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        # unsorted ancestry covers the same versions
        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
632
1594.2.21 by Robert Collins
Teach versioned files to prevent mutation after finishing.
633
    def test_mutate_after_finish(self):
        """Writes after the transaction has finished must be refused."""
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, '', [], [])
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
639
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
640
    def test_copy_to(self):
        """copy_to writes every storage suffix to the target transport."""
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        target = MemoryTransport()
        f.copy_to('foo', target)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(target.has('foo' + suffix))
647
648
    def test_get_suffixes(self):
        """The factory's get_suffixes must return a list."""
        f = self.get_file()
        suffixes = self.get_factory().get_suffixes()
        self.assertTrue(isinstance(suffixes, list))
1563.2.15 by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages.
652
3287.5.1 by Robert Collins
Add VersionedFile.get_parent_map.
653
    def test_get_parent_map(self):
        """get_parent_map returns parent tuples for present versions only."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual({'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual({'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        # unknown keys are simply absent from the result
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))
675
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
676
    def test_annotate(self):
        """annotate attributes each line to the version introducing it."""
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        # the changed first line comes from r1, the kept second from r0
        self.assertEqual('r1', origins[0][0])
        self.assertEqual('r0', origins[1][0])

        self.assertRaises(RevisionNotPresent, f.annotate, 'foo')
686
1563.2.6 by Robert Collins
Start check tests for knits (pending), and remove dead code.
687
    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.
        for make_corrupted in (self.get_file_corrupted_text,
                               self.get_file_corrupted_checksum):
            w = make_corrupted()
            # the uncorrupted text stays readable...
            self.assertEqual('hello\n', w.get_text('v1'))
            # ...but every access to the corrupted text, and a full check,
            # must report the invalid checksum.
            self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
            self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
            self.assertRaises(errors.WeaveInvalidChecksum, w.check)
707
708
    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata.

        Concrete test classes must override this for their format.
        """
        raise NotImplementedError(self.get_file_corrupted_text)
711
1563.2.9 by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation.
712
    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again.

        Concrete test classes must override this for their format.
        """
        raise NotImplementedError(self.reopen_file)
715
1594.2.6 by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved.
716
    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equalset of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):
            # records every update() call so the test can assert on them

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add a ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add a ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # count how often each line is returned
            counts = {}
            pb = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=pb):
                counts.setdefault(line, 0)
                counts[line] += 1
            if pb.updates != []:
                self.assertEqual(expected, pb.updates)
            return counts

        lines = iter_with_versions(['child', 'otherchild'],
            [('Walking content', i, 2) for i in range(3)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_versions(None,
            [('Walking content', i, 5) for i in range(6)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
1594.2.7 by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc.
779
1594.2.8 by Robert Collins
add ghost aware apis to knits.
780
    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError,
                vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct
        # the results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
1594.2.8 by Robert Collins
add ghost aware apis to knits.
817
1594.2.9 by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all.
818
    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        # the first two lines come from 'base', the new one from the child
        for index, expected in enumerate([('base', 'line\n'),
                                          ('base', 'line_b\n'),
                                          ('references_ghost', 'line_c\n')]):
            self.assertEqual(expected, origins[index])
1594.2.23 by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files.
836
837
    def test_readonly_mode(self):
838
        transport = get_transport(self.get_url('.'))
839
        factory = self.get_factory()
840
        vf = factory('id', transport, 0777, create=True, access_mode='w')
841
        vf = factory('id', transport, access_mode='r')
842
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
843
        self.assertRaises(errors.ReadOnlyError,
844
                          vf.add_lines_with_ghosts,
845
                          'base',
846
                          [],
847
                          [])
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
848
3316.2.9 by Robert Collins
* ``VersionedFile.get_sha1`` is deprecated, please use
849
    def test_get_sha1s(self):
        """get_sha1s reports the digest of each stored text."""
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        expected = {
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            }
        self.assertEqual(expected, vf.get_sha1s(['a', 'c', 'b']))
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
864
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
865
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
866
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
    """Run the shared VersionedFile tests against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        """Return a weave whose stored text (but not checksum) is corrupt."""
        weave = WeaveFile('foo', get_transport(self.get_url('.')),
            create=True, get_scope=self.get_transaction)
        weave.add_lines('v1', [], ['hello\n'])
        weave.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
        # We are about to corrupt the weave invasively, so first pin down
        # the internal representation we expect to be poking at.
        self.assertEqual([('{', 0),
                          'hello\n',
                          ('}', None),
                          ('{', 1),
                          'there\n',
                          ('}', None),
                          ], weave._weave)
        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f',
                          '90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], weave._sha1s)
        weave.check()
        # Corrupt the text inserted by v2.
        weave._weave[4] = 'There\n'
        return weave

    def get_file_corrupted_checksum(self):
        """Return a weave whose recorded sha1 (but not text) is corrupt."""
        weave = self.get_file_corrupted_text()
        # Restore the text so only the checksum will be wrong.
        weave._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', weave.get_text('v2'))
        # Invalid checksum: first digit changed.
        weave._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return weave

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')),
            create=create, get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        # Without create=True a missing weave must not be created.
        self.assertRaises(errors.NoSuchFile, WeaveFile, 'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile
920
1563.2.1 by Robert Collins
Merge in a variation of the versionedfile api from versioned-file.
921
3062.1.9 by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile
922
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
    """Tests for versionedfile._PlanMergeVersionedFile."""

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        vf = self.plan_merge_vf
        vf.add_lines(('root', 'a:'), [], [])
        # Version ids must end in ':'; parents and lines must not be None.
        self.assertRaises(ValueError, vf.add_lines, ('root', 'a'), [], [])
        self.assertRaises(ValueError, vf.add_lines, ('root', 'a:'), None, [])
        self.assertRaises(ValueError, vf.add_lines, ('root', 'a:'), [], None)

    def setup_abcde(self):
        """Build A->B in vf1, C->D in vf2, and E: merging B and D."""
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        get_map = self.plan_merge_vf.get_parent_map
        self.assertEqual({('root', 'B'): (('root', 'A'),)},
            get_map([('root', 'B')]))
        self.assertEqual({('root', 'D'): (('root', 'C'),)},
            get_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'): (('root', 'B'), ('root', 'D'))},
            get_map([('root', 'E:')]))
        # Unknown keys are simply absent from the result.
        self.assertEqual({}, get_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'): (('root', 'A'),),
                ('root', 'D'): (('root', 'C'),),
                ('root', 'E:'): (('root', 'B'), ('root', 'D')),
                },
            get_map([('root', 'B'), ('root', 'D'), ('root', 'E:'),
                ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()

        def get_record(suffix):
            # Fetch the single record for ('root', suffix).
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()

        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)
1666.1.1 by Robert Collins
Add trivial http-using test for versioned files.
977
978
979
class TestReadonlyHttpMixin(object):
    """Mixin checking a versioned file is readable over readonly HTTP."""

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        """We should be able to read from http with a versioned file."""
        vf = self.get_file()
        # First, access the still-empty file over the readonly transport.
        readonly_vf = self.get_factory()(
            'foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # Now with some real content in place.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()(
            'foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
997
998
999
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Readonly-HTTP tests run against WeaveFile."""

    def get_factory(self):
        return WeaveFile

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')),
            create=True, get_scope=self.get_transaction)
1007
1008
1664.2.9 by Aaron Bentley
Ported weave merge test to versionedfile
1009
class MergeCasesMixin(object):
    """Weave-merge scenarios shared by concrete merge test classes.

    Subclasses must provide get_file() returning a versioned file and
    log_contents() for diagnostic output.
    """

    def doMerge(self, base, a, b, mp):
        """Merge texts a and b (both derived from base); expect mp.

        :param base: common ancestor lines (without newlines).
        :param a: lines of the first derived text.
        :param b: lines of the second derived text.
        :param mp: expected merged lines.
        """
        from cStringIO import StringIO

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
    # Subclasses override this to state what an overlapped insert produces.
    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']


    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])


    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        """Merge string texts a and b (from base); diff against expected."""
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            int a() {}
=======
            int c() {}
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
1244
1245
2535.3.1 by Andrew Bennetts
Add get_format_signature to VersionedFile
1246
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """The shared merge cases, run against WeaveFile."""

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        buf = StringIO()
        write_weave(w, buf)
        self.log(buf.getvalue())
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1259
1260
1261
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
1262
1263
    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario per lookup combination we expect to use.
        # Each is (source storage kind, requested kind, adapter class).
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source_kind, target_kind, expected_class in scenarios:
            make_adapter = versionedfile.adapter_registry.get(
                (source_kind, target_kind))
            adapter = make_adapter(None)
            self.assertIsInstance(adapter, expected_class)
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1284
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1285
    def get_knit(self, annotated=True):
        """Return a knit store (annotated by default) on this transport."""
        return make_file_factory(annotated, ConstantMapper('knit'))(
            self.get_transport())
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1289
1290
    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""

        def first_record(name):
            # Pull the single record for ``name`` from a fresh stream.
            return f.get_record_stream([(name,)], 'unordered', False).next()

        # 'origin' is stored as a fulltext.
        ft_data = ft_adapter.get_bytes(first_record('origin'))
        # 'merged' is both a delta and has multiple parents.
        delta_data = delta_adapter.get_bytes(first_record('merged'))
        return ft_data, delta_data
1301
1302
    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        f = self.get_knit()
        # Build the standard diamond ancestry, without trailing newlines.
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        expected_ft = (
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n')
        self.assertEqual(expected_ft,
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        expected_delta = (
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n')
        self.assertEqual(expected_delta,
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
1319
1320
    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        f = self.get_knit()
        # Build the standard diamond ancestry (with trailing newlines).
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        expected_ft = (
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n')
        self.assertEqual(expected_ft,
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        expected_delta = (
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n')
        self.assertEqual(expected_delta,
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
1337
1338
    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a delta into a full text needs a backing versioned
        # file holding the base lines; record what is requested from it.
        recording_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(recording_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        expected_calls = [
            ('get_record_stream', [('left',)], 'unordered', True)]
        self.assertEqual(expected_calls, recording_vf.calls)
3350.3.4 by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts.
1353
1354
    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a delta into a full text needs a backing versioned
        # file holding the base lines; record what is requested from it.
        recording_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(recording_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        expected_calls = [
            ('get_record_stream', [('left',)], 'unordered', True)]
        self.assertEqual(expected_calls, recording_vf.calls)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1369
1370
    def test_unannotated_to_fulltext(self):
1371
        """Test adapting unannotated knits to full texts.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1372
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1373
        This is used for -> weaves, and for -> annotated knits.
1374
        """
1375
        # we need a full text, and a delta
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1376
        f = self.get_knit(annotated=False)
1377
        get_diamond_files(f, 1)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1378
        # Reconstructing a full text requires a backing versioned file, and it
1379
        # must have the base lines requested from it.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1380
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1381
        ft_data, delta_data = self.helpGetBytes(f,
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
1382
            _mod_knit.FTPlainToFullText(None),
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1383
            _mod_knit.DeltaPlainToFullText(logged_vf))
1384
        self.assertEqual('origin\n', ft_data)
1385
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1386
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
1387
            True)], logged_vf.calls)
3350.3.5 by Robert Collins
Create adapters from plain compressed knit content.
1388
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1389
    def test_unannotated_to_fulltext_no_eol(self):
1390
        """Test adapting unannotated knits to full texts.
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
1391
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1392
        This is used for -> weaves, and for -> annotated knits.
1393
        """
1394
        # we need a full text, and a delta
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1395
        f = self.get_knit(annotated=False)
1396
        get_diamond_files(f, 1, trailing_eol=False)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1397
        # Reconstructing a full text requires a backing versioned file, and it
1398
        # must have the base lines requested from it.
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1399
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1400
        ft_data, delta_data = self.helpGetBytes(f,
3350.3.7 by Robert Collins
Create a registry of versioned file record adapters.
1401
            _mod_knit.FTPlainToFullText(None),
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1402
            _mod_knit.DeltaPlainToFullText(logged_vf))
1403
        self.assertEqual('origin', ft_data)
1404
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1405
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
1406
            True)], logged_vf.calls)
3350.3.6 by Robert Collins
Test EOL behaviour of plain knit record adapters.
1407
3350.6.1 by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to
1408
1409
class TestKeyMapper(TestCaseWithMemoryTransport):
1410
    """Tests for various key mapping logic."""
1411
1412
    def test_identity_mapper(self):
1413
        mapper = versionedfile.ConstantMapper("inventory")
1414
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
1415
        self.assertEqual("inventory", mapper.map(('quux',)))
1416
1417
    def test_prefix_mapper(self):
1418
        #format5: plain
1419
        mapper = versionedfile.PrefixMapper()
1420
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
1421
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
1422
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
1423
        self.assertEqual(('new-id',), mapper.unmap("new-id"))
1424
1425
    def test_hash_prefix_mapper(self):
1426
        #format6: hash + plain
1427
        mapper = versionedfile.HashPrefixMapper()
1428
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
1429
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
1430
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
1431
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
1432
1433
    def test_hash_escaped_mapper(self):
1434
        #knit1: hash + escaped
1435
        mapper = versionedfile.HashEscapedPrefixMapper()
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1436
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
1437
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
1438
            "revision-id")))
1439
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
1440
            "revision-id")))
1441
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
1442
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1443
1444
1445
class TestVersionedFiles(TestCaseWithMemoryTransport):
1446
    """Tests for the multiple-file variant of VersionedFile."""
1447
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1448
    def get_versionedfiles(self, relpath='files'):
1449
        transport = self.get_transport(relpath)
1450
        if relpath != '.':
1451
            transport.mkdir('.')
1452
        files = self.factory(transport)
1453
        if self.cleanup is not None:
1454
            self.addCleanup(lambda:self.cleanup(files))
1455
        return files
1456
1457
    def test_annotate(self):
1458
        files = self.get_versionedfiles()
1459
        self.get_diamond_files(files)
1460
        if self.key_length == 1:
1461
            prefix = ()
1462
        else:
1463
            prefix = ('FileA',)
1464
        # introduced full text
1465
        origins = files.annotate(prefix + ('origin',))
1466
        self.assertEqual([
1467
            (prefix + ('origin',), 'origin\n')],
1468
            origins)
1469
        # a delta
1470
        origins = files.annotate(prefix + ('base',))
1471
        self.assertEqual([
1472
            (prefix + ('base',), 'base\n')],
1473
            origins)
1474
        # a merge
1475
        origins = files.annotate(prefix + ('merged',))
1476
        if self.graph:
1477
            self.assertEqual([
1478
                (prefix + ('base',), 'base\n'),
1479
                (prefix + ('left',), 'left\n'),
1480
                (prefix + ('right',), 'right\n'),
1481
                (prefix + ('merged',), 'merged\n')
1482
                ],
1483
                origins)
1484
        else:
1485
            # Without a graph everything is new.
1486
            self.assertEqual([
1487
                (prefix + ('merged',), 'base\n'),
1488
                (prefix + ('merged',), 'left\n'),
1489
                (prefix + ('merged',), 'right\n'),
1490
                (prefix + ('merged',), 'merged\n')
1491
                ],
1492
                origins)
1493
        self.assertRaises(RevisionNotPresent,
1494
            files.annotate, prefix + ('missing-key',))
1495
3350.6.2 by Robert Collins
Prepare parameterised test environment.
1496
    def test_construct(self):
1497
        """Each parameterised test can be constructed on a transport."""
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1498
        files = self.get_versionedfiles()
1499
3735.2.5 by Robert Collins
Teach VersionedFiles how to allocate keys based on content hashes.
1500
    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
1501
        nokeys=False):
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1502
        return get_diamond_files(files, self.key_length,
1503
            trailing_eol=trailing_eol, nograph=not self.graph,
3735.2.5 by Robert Collins
Teach VersionedFiles how to allocate keys based on content hashes.
1504
            left_only=left_only, nokeys=nokeys)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1505
4119.1.1 by John Arbash Meinel
Move the 'add_lines_nostoresha' to being tested against all VF implementations.
1506
    def test_add_lines_nostoresha(self):
1507
        """When nostore_sha is supplied using old content raises."""
1508
        vf = self.get_versionedfiles()
1509
        empty_text = ('a', [])
1510
        sample_text_nl = ('b', ["foo\n", "bar\n"])
1511
        sample_text_no_nl = ('c', ["foo\n", "bar"])
1512
        shas = []
1513
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
1514
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
1515
            shas.append(sha)
1516
        # we now have a copy of all the lines in the vf.
1517
        for sha, (version, lines) in zip(
1518
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
1519
            new_key = self.get_simple_key(version + "2")
1520
            self.assertRaises(errors.ExistingContent,
1521
                vf.add_lines, new_key, [], lines,
1522
                nostore_sha=sha)
1523
            # and no new version should have been added.
1524
            record = vf.get_record_stream([new_key], 'unordered', True).next()
1525
            self.assertEqual('absent', record.storage_kind)
1526
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1527
    def test_add_lines_return(self):
1528
        files = self.get_versionedfiles()
1529
        # save code by using the stock data insertion helper.
1530
        adds = self.get_diamond_files(files)
1531
        results = []
1532
        # We can only validate the first 2 elements returned from add_lines.
1533
        for add in adds:
1534
            self.assertEqual(3, len(add))
1535
            results.append(add[:2])
1536
        if self.key_length == 1:
1537
            self.assertEqual([
1538
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1539
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1540
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1541
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1542
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1543
                results)
1544
        elif self.key_length == 2:
1545
            self.assertEqual([
1546
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1547
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1548
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1549
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1550
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1551
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1552
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1553
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1554
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1555
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1556
                results)
1557
3735.2.5 by Robert Collins
Teach VersionedFiles how to allocate keys based on content hashes.
1558
    def test_add_lines_no_key_generates_chk_key(self):
1559
        files = self.get_versionedfiles()
1560
        # save code by using the stock data insertion helper.
1561
        adds = self.get_diamond_files(files, nokeys=True)
1562
        results = []
1563
        # We can only validate the first 2 elements returned from add_lines.
1564
        for add in adds:
1565
            self.assertEqual(3, len(add))
1566
            results.append(add[:2])
1567
        if self.key_length == 1:
1568
            self.assertEqual([
1569
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1570
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1571
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1572
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1573
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1574
                results)
1575
            # Check the added items got CHK keys.
1576
            self.assertEqual(set([
1577
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
1578
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
1579
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
1580
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
1581
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
1582
                ]),
1583
                files.keys())
1584
        elif self.key_length == 2:
1585
            self.assertEqual([
1586
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1587
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1588
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1589
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1590
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1591
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1592
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1593
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1594
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1595
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1596
                results)
1597
            # Check the added items got CHK keys.
1598
            self.assertEqual(set([
1599
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1600
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1601
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1602
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1603
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1604
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1605
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1606
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1607
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1608
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1609
                ]),
1610
                files.keys())
1611
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1612
    def test_empty_lines(self):
1613
        """Empty files can be stored."""
1614
        f = self.get_versionedfiles()
1615
        key_a = self.get_simple_key('a')
1616
        f.add_lines(key_a, [], [])
1617
        self.assertEqual('',
1618
            f.get_record_stream([key_a], 'unordered', True
1619
                ).next().get_bytes_as('fulltext'))
1620
        key_b = self.get_simple_key('b')
1621
        f.add_lines(key_b, self.get_parents([key_a]), [])
1622
        self.assertEqual('',
1623
            f.get_record_stream([key_b], 'unordered', True
1624
                ).next().get_bytes_as('fulltext'))
1625
1626
    def test_newline_only(self):
1627
        f = self.get_versionedfiles()
1628
        key_a = self.get_simple_key('a')
1629
        f.add_lines(key_a, [], ['\n'])
1630
        self.assertEqual('\n',
1631
            f.get_record_stream([key_a], 'unordered', True
1632
                ).next().get_bytes_as('fulltext'))
1633
        key_b = self.get_simple_key('b')
1634
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
1635
        self.assertEqual('\n',
1636
            f.get_record_stream([key_b], 'unordered', True
1637
                ).next().get_bytes_as('fulltext'))
1638
1639
    def test_get_record_stream_empty(self):
1640
        """An empty stream can be requested without error."""
1641
        f = self.get_versionedfiles()
1642
        entries = f.get_record_stream([], 'unordered', False)
1643
        self.assertEqual([], list(entries))
1644
1645
    def assertValidStorageKind(self, storage_kind):
1646
        """Assert that storage_kind is a valid storage_kind."""
1647
        self.assertSubset([storage_kind],
1648
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1649
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
1650
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
4005.3.6 by Robert Collins
Support delta_closure=True with NetworkRecordStream to transmit deltas over the wire when full text extraction is required on the far end.
1651
             'knit-delta-gz',
1652
             'knit-delta-closure', 'knit-delta-closure-ref'])
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1653
1654
    def capture_stream(self, f, entries, on_seen, parents):
1655
        """Capture a stream for testing."""
1656
        for factory in entries:
1657
            on_seen(factory.key)
1658
            self.assertValidStorageKind(factory.storage_kind)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1659
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
1660
                factory.sha1)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1661
            self.assertEqual(parents[factory.key], factory.parents)
1662
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1663
                str)
1664
1665
    def test_get_record_stream_interface(self):
1666
        """each item in a stream has to provide a regular interface."""
1667
        files = self.get_versionedfiles()
1668
        self.get_diamond_files(files)
1669
        keys, _ = self.get_keys_and_sort_order()
1670
        parent_map = files.get_parent_map(keys)
1671
        entries = files.get_record_stream(keys, 'unordered', False)
1672
        seen = set()
1673
        self.capture_stream(files, entries, seen.add, parent_map)
1674
        self.assertEqual(set(keys), seen)
1675
1676
    def get_simple_key(self, suffix):
1677
        """Return a key for the object under test."""
1678
        if self.key_length == 1:
1679
            return (suffix,)
1680
        else:
1681
            return ('FileA',) + (suffix,)
1682
1683
    def get_keys_and_sort_order(self):
1684
        """Get diamond test keys list, and their sort ordering."""
1685
        if self.key_length == 1:
1686
            keys = [('merged',), ('left',), ('right',), ('base',)]
1687
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
1688
        else:
1689
            keys = [
1690
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1691
                ('FileA', 'base'),
1692
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1693
                ('FileB', 'base'),
1694
                ]
1695
            sort_order = {
1696
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
1697
                ('FileA', 'base'):0,
1698
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
1699
                ('FileB', 'base'):0,
1700
                }
1701
        return keys, sort_order
1702
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1703
    def get_keys_and_groupcompress_sort_order(self):
1704
        """Get diamond test keys list, and their groupcompress sort ordering."""
1705
        if self.key_length == 1:
1706
            keys = [('merged',), ('left',), ('right',), ('base',)]
1707
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
1708
        else:
1709
            keys = [
1710
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1711
                ('FileA', 'base'),
1712
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1713
                ('FileB', 'base'),
1714
                ]
1715
            sort_order = {
1716
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
1717
                ('FileA', 'base'):2,
1718
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
1719
                ('FileB', 'base'):5,
1720
                }
1721
        return keys, sort_order
1722
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1723
    def test_get_record_stream_interface_ordered(self):
1724
        """each item in a stream has to provide a regular interface."""
1725
        files = self.get_versionedfiles()
1726
        self.get_diamond_files(files)
1727
        keys, sort_order = self.get_keys_and_sort_order()
1728
        parent_map = files.get_parent_map(keys)
1729
        entries = files.get_record_stream(keys, 'topological', False)
1730
        seen = []
1731
        self.capture_stream(files, entries, seen.append, parent_map)
1732
        self.assertStreamOrder(sort_order, seen, keys)
1733
1734
    def test_get_record_stream_interface_ordered_with_delta_closure(self):
1735
        """each item must be accessible as a fulltext."""
1736
        files = self.get_versionedfiles()
1737
        self.get_diamond_files(files)
1738
        keys, sort_order = self.get_keys_and_sort_order()
1739
        parent_map = files.get_parent_map(keys)
1740
        entries = files.get_record_stream(keys, 'topological', True)
1741
        seen = []
1742
        for factory in entries:
1743
            seen.append(factory.key)
1744
            self.assertValidStorageKind(factory.storage_kind)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1745
            self.assertSubset([factory.sha1],
1746
                [None, files.get_sha1s([factory.key])[factory.key]])
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1747
            self.assertEqual(parent_map[factory.key], factory.parents)
1748
            # self.assertEqual(files.get_text(factory.key),
3890.2.1 by John Arbash Meinel
Start working on a ChunkedContentFactory.
1749
            ft_bytes = factory.get_bytes_as('fulltext')
1750
            self.assertIsInstance(ft_bytes, str)
1751
            chunked_bytes = factory.get_bytes_as('chunked')
1752
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
1753
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1754
        self.assertStreamOrder(sort_order, seen, keys)
1755
4111.1.1 by Robert Collins
Add a groupcompress sort order.
1756
    def test_get_record_stream_interface_groupcompress(self):
1757
        """each item in a stream has to provide a regular interface."""
1758
        files = self.get_versionedfiles()
1759
        self.get_diamond_files(files)
1760
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
1761
        parent_map = files.get_parent_map(keys)
1762
        entries = files.get_record_stream(keys, 'groupcompress', False)
1763
        seen = []
1764
        self.capture_stream(files, entries, seen.append, parent_map)
1765
        self.assertStreamOrder(sort_order, seen, keys)
1766
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1767
    def assertStreamOrder(self, sort_order, seen, keys):
1768
        self.assertEqual(len(set(seen)), len(keys))
1769
        if self.key_length == 1:
1770
            lows = {():0}
1771
        else:
1772
            lows = {('FileA',):0, ('FileB',):0}
1773
        if not self.graph:
1774
            self.assertEqual(set(keys), set(seen))
1775
        else:
1776
            for key in seen:
1777
                sort_pos = sort_order[key]
1778
                self.assertTrue(sort_pos >= lows[key[:-1]],
1779
                    "Out of order in sorted stream: %r, %r" % (key, seen))
1780
                lows[key[:-1]] = sort_pos
1781
1782
    def test_get_record_stream_unknown_storage_kind_raises(self):
1783
        """Asking for a storage kind that the stream cannot supply raises."""
1784
        files = self.get_versionedfiles()
1785
        self.get_diamond_files(files)
1786
        if self.key_length == 1:
1787
            keys = [('merged',), ('left',), ('right',), ('base',)]
1788
        else:
1789
            keys = [
1790
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1791
                ('FileA', 'base'),
1792
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1793
                ('FileB', 'base'),
1794
                ]
1795
        parent_map = files.get_parent_map(keys)
1796
        entries = files.get_record_stream(keys, 'unordered', False)
1797
        # We track the contents because we should be able to try, fail a
1798
        # particular kind and then ask for one that works and continue.
1799
        seen = set()
1800
        for factory in entries:
1801
            seen.add(factory.key)
1802
            self.assertValidStorageKind(factory.storage_kind)
3350.8.3 by Robert Collins
VF.get_sha1s needed changing to be stackable.
1803
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
1804
                factory.sha1)
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1805
            self.assertEqual(parent_map[factory.key], factory.parents)
1806
            # currently no stream emits mpdiff
1807
            self.assertRaises(errors.UnavailableRepresentation,
1808
                factory.get_bytes_as, 'mpdiff')
1809
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1810
                str)
1811
        self.assertEqual(set(keys), seen)
1812
1813
    def test_get_record_stream_missing_records_are_absent(self):
1814
        files = self.get_versionedfiles()
1815
        self.get_diamond_files(files)
1816
        if self.key_length == 1:
1817
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
1818
        else:
1819
            keys = [
1820
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1821
                ('FileA', 'absent'), ('FileA', 'base'),
1822
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1823
                ('FileB', 'absent'), ('FileB', 'base'),
1824
                ('absent', 'absent'),
1825
                ]
1826
        parent_map = files.get_parent_map(keys)
1827
        entries = files.get_record_stream(keys, 'unordered', False)
1828
        self.assertAbsentRecord(files, keys, parent_map, entries)
1829
        entries = files.get_record_stream(keys, 'topological', False)
1830
        self.assertAbsentRecord(files, keys, parent_map, entries)
1831
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1832
    def assertRecordHasContent(self, record, bytes):
1833
        """Assert that record has the bytes bytes."""
1834
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
4005.3.7 by Robert Collins
Review feedback.
1835
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1836
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1837
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
1838
        files = self.get_versionedfiles()
1839
        key = self.get_simple_key('foo')
1840
        files.add_lines(key, (), ['my text\n', 'content'])
1841
        stream = files.get_record_stream([key], 'unordered', False)
1842
        record = stream.next()
1843
        if record.storage_kind in ('chunked', 'fulltext'):
1844
            # chunked and fulltext representations are for direct use not wire
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1845
            # serialisation: check they are able to be used directly. To send
1846
            # such records over the wire translation will be needed.
1847
            self.assertRecordHasContent(record, "my text\ncontent")
4005.3.2 by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully.
1848
        else:
1849
            bytes = [record.get_bytes_as(record.storage_kind)]
1850
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
1851
            source_record = record
1852
            records = []
1853
            for record in network_stream:
1854
                records.append(record)
1855
                self.assertEqual(source_record.storage_kind,
1856
                    record.storage_kind)
1857
                self.assertEqual(source_record.parents, record.parents)
1858
                self.assertEqual(
1859
                    source_record.get_bytes_as(source_record.storage_kind),
1860
                    record.get_bytes_as(record.storage_kind))
1861
            self.assertEqual(1, len(records))
1862
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
1863
    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # Assert while copying so mismatches surface early, which makes
        # debugging much easier than checking after the fact.
        for record, reference in izip(stream, expected):
            records.append(record)
            self.assertEqual(reference.key, record.key)
            self.assertEqual(reference.storage_kind, record.storage_kind)
            self.assertEqual(reference.parents, record.parents)
            yield record
1877
1878
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A single-element list; element 0 is
            incremented for every record that cannot be wire-serialised.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            kind = record.storage_kind
            if kind in ('chunked', 'fulltext'):
                # These kinds are for direct use, not for the wire: count
                # the skip and verify the content is usable as-is.
                skipped_records[0] += 1
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(kind)
1896
4005.3.3 by Robert Collins
Test NetworkRecordStream with delta'd texts.
1897
    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        """A fulltext plus a delta both round trip over the network."""
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        delta_parents = (key,) if self.graph else ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object
        # so we can check the content was carried across correctly without
        # doing delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it all must have all been.
            self.assertIdenticalVersionedFile(files, target_files)
1929
1930
    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        """A lone delta record can be copied over the wire."""
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        delta_parents = (key,) if self.graph else ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across first so the delta can be reconstructed
        # when it is inserted into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object
        # so we can check the content was carried across correctly without
        # doing delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it all must have all been
            self.assertIdenticalVersionedFile(files, target_files)
1966
4005.3.5 by Robert Collins
Interface level test for using delta_closure=True over the network.
1967
    def test_get_record_stream_wire_ready_delta_closure_included(self):
        """With delta_closure=True a wire-copied delta yields its fulltext."""
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        delta_parents = (key,) if self.graph else ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Walk the records coming back from the network; because the delta
        # closure was requested every record must be expandable to its full
        # text without consulting any other store.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
1996
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
1997
    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        observed = set()
        for factory in entries:
            observed.add(factory.key)
            if factory.key[-1] == 'absent':
                # Absent records expose no sha1, parents or content.
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                # Present records must carry valid metadata and bytes.
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), observed)
2014
2015
    def test_filter_absent_records(self):
        """Requested missing records can be filter trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered).
        present_keys = list(keys)
        keys.insert(2, ('extra',) * self.key_length)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)
2034
2035
    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        # Single-element keys all map to one store file; longer keys use a
        # hash-escaped prefix per file-id.
        if self.key_length != 1:
            return HashEscapedPrefixMapper()
        return ConstantMapper('source')
2041
2042
    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration.

        :return: the given parents unchanged when the scenario records a
            graph, otherwise None.
        """
        return parents if self.graph else None
2048
2049
    def test_get_parent_map(self):
        """get_parent_map answers for present keys and ignores absent ones."""
        files = self.get_versionedfiles()
        if self.key_length == 1:
            expected = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            expected = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in expected:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(expected)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result - insert one in the
        # middle of the query to prove it is simply ignored.
        keys = list(all_parents.keys())
        keys.insert(1, ('missing',) * self.key_length)
        self.assertEqual(all_parents, files.get_parent_map(keys))
2085
2086
    def test_get_sha1s(self):
        """get_sha1s returns the known sha1 for every requested key."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        # These sha1s are fixed by the diamond fixture's contents.
        expected = {
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            }
        self.assertEqual(expected, files.get_sha1s(keys))
3943.8.1 by Marius Kruger
remove all trailing whitespace from bzr source
2105
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2106
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        vf = self.get_versionedfiles()
        # An empty stream must be accepted without error or side effect.
        vf.insert_record_stream([])
2110
2111
    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        def fulltext_of(vf, key):
            # Pull a single key's fulltext out of a versionedfiles object.
            stream = vf.get_record_stream([key], 'unordered', True)
            return stream.next().get_bytes_as('fulltext')
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            # Graphless scenarios must report no parents at all.
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            self.assertEqual(fulltext_of(actual, key),
                fulltext_of(expected, key))
2126
2127
    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        # Build the fixture with trailing EOLs: the trailing_eol=False case
        # is covered by test_insert_record_stream_fulltexts_noeol, which was
        # previously byte-identical to this test (a copy/paste slip).
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2141
2142
    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts without trailing EOL."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        factory = make_versioned_files_factory(WeaveFile, self.get_mapper())
        source = factory(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2156
2157
    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # True -> annotated knit factory.
        source = make_file_factory(True, self.get_mapper())(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2169
2170
    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a no-trailing-EOL stream from annotated knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # True -> annotated knit factory.
        source = make_file_factory(True, self.get_mapper())(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2182
2183
    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # False -> plain (unannotated) knit factory.
        source = make_file_factory(False, self.get_mapper())(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2195
2196
    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a no-trailing-EOL stream from plain knits."""
        files = self.get_versionedfiles()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # False -> plain (unannotated) knit factory.
        source = make_file_factory(False, self.get_mapper())(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2208
2209
    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # Pre-populate the target with a subset so the stream overlaps it.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological', False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2220
2221
    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        missing_key = ('missing',) * self.key_length
        stream = source.get_record_stream([missing_key], 'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)
2229
2230
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        # Deliberately deliver descendants before their parents.
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        # Qualified for consistency with the rest of this module, which
        # raises/catches errors.RevisionNotPresent everywhere else.
        except errors.RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
2256
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
2257
    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        transport = self.get_transport('source')
        transport.mkdir('.')
        # A plain knit always stores deltas, whatever the scenario under test.
        source = make_file_factory(False, self.get_mapper())(transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source
2268
2269
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        stream = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            # Nothing should be reported missing before the insert.
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(stream)
            # 'merged' is a delta against 'left', which was not sent.
            missing = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            # Implementations without partial insertion must refuse the
            # stream outright and stay uncorrupted.
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, stream)
            files.check()
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2290
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
2291
    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        merged_key = self.get_simple_key('merged')
        stream = source.get_record_stream(
            [self.get_simple_key('origin'), merged_key], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(stream)
        # 'merged' arrived as a delta whose basis ('left') was withheld.
        missing = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]), set(missing))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records.
        files.insert_record_stream(
            source.get_record_stream(missing, 'unordered', True))
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()
4009.3.2 by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later.
2323
3350.6.4 by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores.
2324
    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):
            # Records every update() call so the test can verify the
            # progress reporting made by the iterator.

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add a ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add a ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # Count how often each line is returned.  The local is named
            # 'pb' to avoid shadowing the module-level 'progress' import.
            line_counts = {}
            pb = InstrumentedProgress()
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=pb):
                line_counts[line] = line_counts.get(line, 0) + 1
            # Implementations that report no progress at all are allowed;
            # those that do must match the expected updates exactly.
            if pb.updates != []:
                self.assertEqual(expected, pb.updates)
            return line_counts

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
2397
2398
    def test_make_mpdiffs(self):
        """Round-trip every text through make_mpdiffs/add_mpdiffs.

        Texts are added to a 'source' store, converted to multi-parent
        diffs one at a time, applied to a 'target' store, and the
        reconstructed fulltexts are compared against the originals.
        """
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        def add_chain(text_name):
            # Insert a linear chain of 26 texts rooted at 'base', each one
            # line longer than its parent, to exercise long delta chains.
            # (The original code repeated this loop verbatim for each chain
            # and carried an unused dict of expected sha1s; both removed.)
            next_parent = self.get_simple_key('base')
            text = ['line\n']
            for depth in range(26):
                new_version = self.get_simple_key(text_name + '%s' % depth)
                text = text + ['line\n']
                files.add_lines(new_version,
                    self.get_parents([next_parent]), text)
                next_parent = new_version
        add_chain('chain1-')
        add_chain('chain2-')
        # Convert each text (in topological order, so parents exist in the
        # target first) and verify the reconstructed fulltext is identical.
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )
2490
2491
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        store = self.get_versionedfiles()
        # A fresh store exposes no keys.
        self.assertEqual(set(), set(store.keys()))
        if self.key_length != 1:
            key = ('foo', 'bar',)
        else:
            key = ('foo',)
        # Adding one (empty) text makes exactly that key visible.
        store.add_lines(key, (), [])
        self.assertEqual(set([key]), set(store.keys()))
3518.1.1 by Jelmer Vernooij
Add VirtualVersionedFiles class.
2502
2503
2504
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        # Parent-map callback handed to VirtualVersionedFiles: answer only
        # for the keys we know about, omitting the rest.
        return dict((key, self._parent_map[key])
                    for key in keys if key in self._parent_map)

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        # The virtual store does not support adding texts.
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        # The virtual store does not support adding mpdiffs.
        self.assertRaises(NotImplementedError, self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        # The virtual store does not support inserting record streams.
        self.assertRaises(NotImplementedError,
                          self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        # Unknown keys are simply absent from the result dict.
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        expected_sha1 = osutils.sha_strings(self._lines["key"])
        self.assertEquals({("key",): expected_sha1},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        # Parents are reported as tuple keys; unqueried/unknown keys ("L")
        # do not appear in the result.
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        record = stream.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        self.assertEquals("absent", stream.next().storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        lines = self.texts.iter_lines_added_or_present_in_keys(
            [("A",), ("B",)])
        # Only lines from the requested keys ("A" and "B") are yielded.
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(lines))
2569
3871.4.1 by John Arbash Meinel
Add a VFDecorator that can yield records in a specified order
2570
2571
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):
2572
2573
    def get_ordering_vf(self, key_priority):
        """Return an OrderingVersionedFilesDecorator over real inventories.

        Builds a branch with linear history A->B->C->D and wraps its
        repository's inventories store with the given key_priority map.
        """
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        # Extend the history linearly: B on A, C on B, D on C.
        for revid, parent in [('B', 'A'), ('C', 'B'), ('D', 'C')]:
            builder.build_snapshot(revid, [parent], [])
        builder.finish_series()
        branch = builder.get_branch()
        branch.lock_read()
        self.addCleanup(branch.unlock)
        inventories = branch.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(
            inventories, key_priority)
2587
2588
    def test_get_empty(self):
        # With an empty priority map and no requests, nothing is recorded.
        decorated = self.get_ordering_vf({})
        self.assertEqual([], decorated.calls)
2591
2592
    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'topological', False)
        keys = [record.key for record in stream]
        # Topological ordering wins over the priority map: ancestors first.
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # The underlying request was recorded verbatim.
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)
2602
2603
    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        keys = [record.key for record in stream]
        # 'unordered' lets the decorator reorder by ascending priority.
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # The underlying request was recorded verbatim.
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
2613
2614
    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        keys = [record.key for record in stream]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first.
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # The underlying request was recorded verbatim.
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)