/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/tests/per_versionedfile.py

  • Committer: Jelmer Vernooij
  • Date: 2018-05-06 11:48:54 UTC
  • mto: This revision was merged to the branch mainline in revision 6960.
  • Revision ID: jelmer@jelmer.uk-20180506114854-h4qd9ojaqy8wxjsd
Move .mailmap to root.
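
This change is one step in the Python 3 port of these tests: version ids, parent lists, and stored text lines switch from native str literals to bytes literals, while option strings, storage kinds, and progress labels stay native str. A minimal sketch of the pattern, with `f` standing in for any versioned file under test (illustrative, not part of the diff):

    # Python 2 era: version ids and content as native str.
    f.add_lines('r0', [], ['a\n', 'b\n'])

    # Ported: bytes throughout -- under Python 3, 'r0' != b'r0',
    # so lookups and assertions must use bytes as well.
    f.add_lines(b'r0', [], [b'a\n', b'b\n'])
    assert f.get_text(b'r0') == b'a\nb\n'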

--- breezy/tests/per_versionedfile.py (old)
+++ breezy/tests/per_versionedfile.py (new)
@@ -59,6 +59,7 @@
 from ..transport.memory import MemoryTransport
 from ..bzr import versionedfile as versionedfile
 from ..bzr.versionedfile import (
+    ChunkedContentFactory,
     ConstantMapper,
     HashEscapedPrefixMapper,
     PrefixMapper,
@@ -82,30 +83,30 @@
     :param trailing_eol: If True end the last line with \n.
     """
     parents = {
-        'origin': (),
-        'base': (('origin',),),
-        'left': (('base',),),
-        'right': (('base',),),
-        'merged': (('left',), ('right',)),
+        b'origin': (),
+        b'base': ((b'origin',),),
+        b'left': ((b'base',),),
+        b'right': ((b'base',),),
+        b'merged': ((b'left',), (b'right',)),
         }
     # insert a diamond graph to exercise deltas and merges.
     if trailing_eol:
-        last_char = '\n'
+        last_char = b'\n'
     else:
-        last_char = ''
-    f.add_lines('origin', [], ['origin' + last_char])
-    f.add_lines('base', ['origin'], ['base' + last_char])
-    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
+        last_char = b''
+    f.add_lines(b'origin', [], [b'origin' + last_char])
+    f.add_lines(b'base', [b'origin'], [b'base' + last_char])
+    f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
     if not left_only:
-        f.add_lines('right', ['base'],
-            ['base\n', 'right' + last_char])
-        f.add_lines('merged', ['left', 'right'],
-            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
+        f.add_lines(b'right', [b'base'],
+                    [b'base\n', b'right' + last_char])
+        f.add_lines(b'merged', [b'left', b'right'],
+                    [b'base\n', b'left\n', b'right\n', b'merged' + last_char])
     return f, parents
 
 
 def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
-    nograph=False, nokeys=False):
+                      nograph=False, nokeys=False):
     """Get a diamond graph to exercise deltas and merges.
 
     This creates a 5-node graph in files. If files supports 2-length keys two
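
For orientation: both diamond helpers build the same five-node ancestry, now keyed by bytes. The shape, reconstructed from the parents dict above (the diagram itself is editorial):

    #        b'origin'
    #            |
    #         b'base'
    #         /      \
    #    b'left'   b'right'
    #         \      /
    #        b'merged'
    #
    # b'merged' has two parents, which is what exercises the
    # delta-compression and merge/annotate paths these tests target.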
@@ -127,19 +128,21 @@
     if key_length == 1:
         prefixes = [()]
     else:
-        prefixes = [('FileA',), ('FileB',)]
+        prefixes = [(b'FileA',), (b'FileB',)]
     # insert a diamond graph to exercise deltas and merges.
     if trailing_eol:
-        last_char = '\n'
+        last_char = b'\n'
     else:
-        last_char = ''
+        last_char = b''
     result = []
+
     def get_parents(suffix_list):
         if nograph:
             return ()
         else:
             result = [prefix + suffix for suffix in suffix_list]
             return result
+
     def get_key(suffix):
         if nokeys:
             return (None, )
@@ -148,24 +151,25 @@
     # we loop over each key because that spreads the inserts across prefixes,
     # which is how commit operates.
     for prefix in prefixes:
-        result.append(files.add_lines(prefix + get_key('origin'), (),
-            ['origin' + last_char]))
-    for prefix in prefixes:
-        result.append(files.add_lines(prefix + get_key('base'),
-            get_parents([('origin',)]), ['base' + last_char]))
-    for prefix in prefixes:
-        result.append(files.add_lines(prefix + get_key('left'),
-            get_parents([('base',)]),
-            ['base\n', 'left' + last_char]))
+        result.append(files.add_lines(prefix + get_key(b'origin'), (),
+                                      [b'origin' + last_char]))
+    for prefix in prefixes:
+        result.append(files.add_lines(prefix + get_key(b'base'),
+                                      get_parents([(b'origin',)]), [b'base' + last_char]))
+    for prefix in prefixes:
+        result.append(files.add_lines(prefix + get_key(b'left'),
+                                      get_parents([(b'base',)]),
+                                      [b'base\n', b'left' + last_char]))
     if not left_only:
         for prefix in prefixes:
-            result.append(files.add_lines(prefix + get_key('right'),
-                get_parents([('base',)]),
-                ['base\n', 'right' + last_char]))
+            result.append(files.add_lines(prefix + get_key(b'right'),
+                                          get_parents([(b'base',)]),
+                                          [b'base\n', b'right' + last_char]))
         for prefix in prefixes:
-            result.append(files.add_lines(prefix + get_key('merged'),
-                get_parents([('left',), ('right',)]),
-                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
+            result.append(files.add_lines(prefix + get_key(b'merged'),
+                                          get_parents(
+                                              [(b'left',), (b'right',)]),
+                                          [b'base\n', b'left\n', b'right\n', b'merged' + last_char]))
     return result
 
 
@@ -184,22 +188,23 @@
 
     def test_add(self):
         f = self.get_file()
-        f.add_lines('r0', [], ['a\n', 'b\n'])
-        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
+        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
+        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
+
         def verify_file(f):
             versions = f.versions()
-            self.assertTrue('r0' in versions)
-            self.assertTrue('r1' in versions)
-            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEqual(f.get_text('r0'), 'a\nb\n')
-            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
+            self.assertTrue(b'r0' in versions)
+            self.assertTrue(b'r1' in versions)
+            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
+            self.assertEqual(f.get_text(b'r0'), b'a\nb\n')
+            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
             self.assertEqual(2, len(f))
             self.assertEqual(2, f.num_versions())
 
             self.assertRaises(RevisionNotPresent,
-                f.add_lines, 'r2', ['foo'], [])
+                              f.add_lines, b'r2', [b'foo'], [])
             self.assertRaises(RevisionAlreadyPresent,
-                f.add_lines, 'r1', [], [])
+                              f.add_lines, b'r1', [], [])
         verify_file(f)
         # this checks that reopen with create=True does not break anything.
         f = self.reopen_file(create=True)
@@ -208,32 +213,34 @@
     def test_adds_with_parent_texts(self):
         f = self.get_file()
         parent_texts = {}
-        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
+        _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
         try:
-            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
-                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
+            _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
+                                                                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
         except NotImplementedError:
             # if the format doesn't support ghosts, just add normally.
-            _, _, parent_texts['r1'] = f.add_lines('r1',
-                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
-        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
-        self.assertNotEqual(None, parent_texts['r0'])
-        self.assertNotEqual(None, parent_texts['r1'])
+            _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
+                                                    [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
+        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
+                    parent_texts=parent_texts)
+        self.assertNotEqual(None, parent_texts[b'r0'])
+        self.assertNotEqual(None, parent_texts[b'r1'])
+
         def verify_file(f):
             versions = f.versions()
-            self.assertTrue('r0' in versions)
-            self.assertTrue('r1' in versions)
-            self.assertTrue('r2' in versions)
-            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
-            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
-            self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
+            self.assertTrue(b'r0' in versions)
+            self.assertTrue(b'r1' in versions)
+            self.assertTrue(b'r2' in versions)
+            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
+            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
+            self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
             self.assertEqual(3, f.num_versions())
-            origins = f.annotate('r1')
-            self.assertEqual(origins[0][0], 'r0')
-            self.assertEqual(origins[1][0], 'r1')
-            origins = f.annotate('r2')
-            self.assertEqual(origins[0][0], 'r1')
-            self.assertEqual(origins[1][0], 'r2')
+            origins = f.annotate(b'r1')
+            self.assertEqual(origins[0][0], b'r0')
+            self.assertEqual(origins[1][0], b'r1')
+            origins = f.annotate(b'r2')
+            self.assertEqual(origins[0][0], b'r1')
+            self.assertEqual(origins[1][0], b'r2')
 
         verify_file(f)
         f = self.reopen_file()
@@ -244,10 +251,10 @@
         # versioned files version sequences of bytes only.
         vf = self.get_file()
         self.assertRaises(errors.BzrBadParameterUnicode,
-            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
+                          vf.add_lines, b'a', [], [b'a\n', u'b\n', b'c\n'])
         self.assertRaises(
             (errors.BzrBadParameterUnicode, NotImplementedError),
-            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
+            vf.add_lines_with_ghosts, b'a', [], [b'a\n', u'b\n', b'c\n'])
 
     def test_add_follows_left_matching_blocks(self):
         """If we change left_matching_blocks, delta changes
@@ -258,60 +265,60 @@
         vf = self.get_file()
         if isinstance(vf, WeaveFile):
             raise TestSkipped("WeaveFile ignores left_matching_blocks")
-        vf.add_lines('1', [], ['a\n'])
-        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
+        vf.add_lines(b'1', [], [b'a\n'])
+        vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
                      left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
-        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
-        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
+        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
+        vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
                      left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
-        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
+        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))
 
     def test_inline_newline_throws(self):
         # \r characters are not permitted in lines being added
         vf = self.get_file()
         self.assertRaises(errors.BzrBadParameterContainsNewline,
-            vf.add_lines, 'a', [], ['a\n\n'])
+                          vf.add_lines, b'a', [], [b'a\n\n'])
         self.assertRaises(
             (errors.BzrBadParameterContainsNewline, NotImplementedError),
-            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
+            vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
         # but inline CR's are allowed
-        vf.add_lines('a', [], ['a\r\n'])
+        vf.add_lines(b'a', [], [b'a\r\n'])
         try:
-            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
+            vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
         except NotImplementedError:
             pass
 
     def test_add_reserved(self):
         vf = self.get_file()
         self.assertRaises(errors.ReservedId,
-            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
+                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])
 
     def test_add_lines_nostoresha(self):
         """When nostore_sha is supplied using old content raises."""
         vf = self.get_file()
-        empty_text = ('a', [])
-        sample_text_nl = ('b', ["foo\n", "bar\n"])
-        sample_text_no_nl = ('c', ["foo\n", "bar"])
+        empty_text = (b'a', [])
+        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
+        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
         shas = []
         for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
-            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
             self.assertRaises(errors.ExistingContent,
-                vf.add_lines, version + "2", [], lines,
-                nostore_sha=sha)
+                              vf.add_lines, version + b"2", [], lines,
+                              nostore_sha=sha)
             # and no new version should have been added.
             self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
-                version + "2")
+                              version + b"2")
 
     def test_add_lines_with_ghosts_nostoresha(self):
         """When nostore_sha is supplied using old content raises."""
         vf = self.get_file()
-        empty_text = ('a', [])
-        sample_text_nl = ('b', ["foo\n", "bar\n"])
-        sample_text_no_nl = ('c', ["foo\n", "bar"])
+        empty_text = (b'a', [])
+        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
+        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
         shas = []
         for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
             sha, _, _ = vf.add_lines(version, [], lines)
@@ -319,24 +326,24 @@
         # we now have a copy of all the lines in the vf.
         # is the test applicable to this vf implementation?
         try:
-            vf.add_lines_with_ghosts('d', [], [])
+            vf.add_lines_with_ghosts(b'd', [], [])
         except NotImplementedError:
             raise TestSkipped("add_lines_with_ghosts is optional")
         for sha, (version, lines) in zip(
-            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
             self.assertRaises(errors.ExistingContent,
-                vf.add_lines_with_ghosts, version + "2", [], lines,
-                nostore_sha=sha)
+                              vf.add_lines_with_ghosts, version + b"2", [], lines,
+                              nostore_sha=sha)
             # and no new version should have been added.
             self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
-                version + "2")
+                              version + b"2")
 
     def test_add_lines_return_value(self):
         # add_lines should return the sha1 and the text size.
         vf = self.get_file()
-        empty_text = ('a', [])
-        sample_text_nl = ('b', ["foo\n", "bar\n"])
-        sample_text_no_nl = ('c', ["foo\n", "bar"])
+        empty_text = (b'a', [])
+        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
+        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
         # check results for the three cases:
         for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
             # the first two elements are the same for all versioned files:
@@ -345,17 +352,17 @@
             result = vf.add_lines(version, [], lines)
             self.assertEqual(3, len(result))
             self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
-                result[0:2])
+                             result[0:2])
         # parents should not affect the result:
         lines = sample_text_nl[1]
         self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
-            vf.add_lines('d', ['b', 'c'], lines)[0:2])
+                         vf.add_lines(b'd', [b'b', b'c'], lines)[0:2])
 
     def test_get_reserved(self):
         vf = self.get_file()
-        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
-        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
-        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
+        self.assertRaises(errors.ReservedId, vf.get_texts, [b'b:'])
+        self.assertRaises(errors.ReservedId, vf.get_lines, b'b:')
+        self.assertRaises(errors.ReservedId, vf.get_text, b'b:')
 
     def test_add_unchanged_last_line_noeol_snapshot(self):
         """Add a text with an unchanged last line with no eol should work."""
@@ -370,28 +377,28 @@
         for length in range(20):
             version_lines = {}
             vf = self.get_file('case-%d' % length)
-            prefix = 'step-%d'
+            prefix = b'step-%d'
             parents = []
             for step in range(length):
                 version = prefix % step
-                lines = (['prelude \n'] * step) + ['line']
+                lines = ([b'prelude \n'] * step) + [b'line']
                 vf.add_lines(version, parents, lines)
                 version_lines[version] = lines
                 parents = [version]
-            vf.add_lines('no-eol', parents, ['line'])
+            vf.add_lines(b'no-eol', parents, [b'line'])
             vf.get_texts(version_lines.keys())
-            self.assertEqualDiff('line', vf.get_text('no-eol'))
+            self.assertEqualDiff(b'line', vf.get_text(b'no-eol'))
 
     def test_get_texts_eol_variation(self):
         # similar to the failure in <http://bugs.launchpad.net/234748>
         vf = self.get_file()
-        sample_text_nl = ["line\n"]
-        sample_text_no_nl = ["line"]
+        sample_text_nl = [b"line\n"]
+        sample_text_no_nl = [b"line"]
         versions = []
         version_lines = {}
         parents = []
         for i in range(4):
-            version = 'v%d' % i
+            version = b'v%d' % i
             if i % 2:
                 lines = sample_text_nl
             else:
@@ -403,7 +410,7 @@
             # (which is what this test tests) will generate a correct line
             # delta (which is to say, an empty delta).
             vf.add_lines(version, parents, lines,
-                left_matching_blocks=[(0, 0, 1)])
+                         left_matching_blocks=[(0, 0, 1)])
             parents = [version]
             versions.append(version)
             version_lines[version] = lines
@@ -421,17 +428,17 @@
         # Test adding this in two situations:
         # On top of a new insertion
         vf = self.get_file('fulltext')
-        vf.add_lines('noeol', [], ['line'])
-        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
-            left_matching_blocks=[(0, 1, 1)])
-        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
+        vf.add_lines(b'noeol', [], [b'line'])
+        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
+                     left_matching_blocks=[(0, 1, 1)])
+        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
         # On top of a delta
         vf = self.get_file('delta')
-        vf.add_lines('base', [], ['line'])
-        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
-        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
-            left_matching_blocks=[(1, 1, 1)])
-        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
+        vf.add_lines(b'base', [], [b'line'])
+        vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
+        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
+                     left_matching_blocks=[(1, 1, 1)])
+        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
 
     def test_make_mpdiffs(self):
         from breezy import multiparent
@@ -448,78 +455,80 @@
     def test_make_mpdiffs_with_ghosts(self):
         vf = self.get_file('foo')
         try:
-            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
+            vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
         except NotImplementedError:
             # old Weave formats do not allow ghosts
             return
-        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
+        self.assertRaises(errors.RevisionNotPresent,
+                          vf.make_mpdiffs, [b'ghost'])
 
     def _setup_for_deltas(self, f):
         self.assertFalse(f.has_version('base'))
         # add texts that should trip the knit maximum delta chain threshold
         # as well as doing parallel chains of data in knits.
         # this is done by two chains of 25 insertions
-        f.add_lines('base', [], ['line\n'])
-        f.add_lines('noeol', ['base'], ['line'])
+        f.add_lines(b'base', [], [b'line\n'])
+        f.add_lines(b'noeol', [b'base'], [b'line'])
         # detailed eol tests:
         # shared last line with parent no-eol
-        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
+        f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
         # differing last line with parent, both no-eol
-        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
+        f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
         # add eol following a noneol parent, change content
-        f.add_lines('eol', ['noeol'], ['phone\n'])
+        f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
         # add eol following a noneol parent, no change content
-        f.add_lines('eolline', ['noeol'], ['line\n'])
+        f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
         # noeol with no parents:
-        f.add_lines('noeolbase', [], ['line'])
+        f.add_lines(b'noeolbase', [], [b'line'])
         # noeol preceeding its leftmost parent in the output:
         # this is done by making it a merge of two parents with no common
         # anestry: noeolbase and noeol with the
         # later-inserted parent the leftmost.
-        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
+        f.add_lines(b'eolbeforefirstparent', [
+                    b'noeolbase', b'noeol'], [b'line'])
         # two identical eol texts
-        f.add_lines('noeoldup', ['noeol'], ['line'])
-        next_parent = 'base'
-        text_name = 'chain1-'
-        text = ['line\n']
-        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
-                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
-                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
-                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
-                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
-                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
-                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
-                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
-                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
-                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
-                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
-                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
-                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
-                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
-                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
-                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
-                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
-                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
-                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
-                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
-                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
-                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
-                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
-                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
-                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
-                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
+        f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
+        next_parent = b'base'
+        text_name = b'chain1-'
+        text = [b'line\n']
+        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
+                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
+                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
+                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
+                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
+                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
+                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
+                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
+                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
+                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
+                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
+                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
+                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
+                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
+                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
+                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
+                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
+                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
+                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
+                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
+                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
+                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
+                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
+                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
+                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
+                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                  }
         for depth in range(26):
-            new_version = text_name + '%s' % depth
-            text = text + ['line\n']
+            new_version = text_name + b'%d' % depth
+            text = text + [b'line\n']
             f.add_lines(new_version, [next_parent], text)
             next_parent = new_version
-        next_parent = 'base'
-        text_name = 'chain2-'
-        text = ['line\n']
+        next_parent = b'base'
+        text_name = b'chain2-'
+        text = [b'line\n']
         for depth in range(26):
-            new_version = text_name + '%s' % depth
-            text = text + ['line\n']
+            new_version = text_name + b'%d' % depth
+            text = text + [b'line\n']
             f.add_lines(new_version, [next_parent], text)
             next_parent = new_version
         return sha1s
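
One detail worth noting in _setup_for_deltas: the old code built chain names with '%s' % depth on str, while the ported code uses b'%d' % depth. Bytes objects only gained %-interpolation in Python 3.5 (PEP 461), and the bytes %s/%b code expects bytes-like operands, so %d is the natural spelling for an integer counter. A standalone illustration (not from the file):

    >>> b'chain1-%d' % 3
    b'chain1-3'
    >>> b'chain1-%s' % 3
    Traceback (most recent call last):
      ...
    TypeError: %b requires a bytes-like object, or an object that implements __bytes__, not 'int'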
@@ -527,44 +536,45 @@
     def test_ancestry(self):
         f = self.get_file()
         self.assertEqual([], f.get_ancestry([]))
-        f.add_lines('r0', [], ['a\n', 'b\n'])
-        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
-        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
-        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
-        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
+        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
+        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
+        f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
+        f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
+        f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
         self.assertEqual([], f.get_ancestry([]))
-        versions = f.get_ancestry(['rM'])
+        versions = f.get_ancestry([b'rM'])
         # there are some possibilities:
         # r0 r1 r2 rM r3
         # r0 r1 r2 r3 rM
         # etc
         # so we check indexes
-        r0 = versions.index('r0')
-        r1 = versions.index('r1')
-        r2 = versions.index('r2')
-        self.assertFalse('r3' in versions)
-        rM = versions.index('rM')
+        r0 = versions.index(b'r0')
+        r1 = versions.index(b'r1')
+        r2 = versions.index(b'r2')
+        self.assertFalse(b'r3' in versions)
+        rM = versions.index(b'rM')
         self.assertTrue(r0 < r1)
         self.assertTrue(r0 < r2)
         self.assertTrue(r1 < rM)
         self.assertTrue(r2 < rM)
 
         self.assertRaises(RevisionNotPresent,
-            f.get_ancestry, ['rM', 'rX'])
+                          f.get_ancestry, [b'rM', b'rX'])
 
-        self.assertEqual(set(f.get_ancestry('rM')),
-            set(f.get_ancestry('rM', topo_sorted=False)))
+        self.assertEqual(set(f.get_ancestry(b'rM')),
+                         set(f.get_ancestry(b'rM', topo_sorted=False)))
 
     def test_mutate_after_finish(self):
         self._transaction = 'before'
         f = self.get_file()
         self._transaction = 'after'
-        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
-        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
+        self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
+        self.assertRaises(errors.OutSideTransaction,
+                          f.add_lines_with_ghosts, b'', [], [])
 
     def test_copy_to(self):
         f = self.get_file()
-        f.add_lines('0', [], ['a\n'])
+        f.add_lines(b'0', [], [b'a\n'])
         t = MemoryTransport()
         f.copy_to('foo', t)
         for suffix in self.get_factory().get_suffixes():
@@ -577,37 +587,37 @@
 
     def test_get_parent_map(self):
         f = self.get_file()
-        f.add_lines('r0', [], ['a\n', 'b\n'])
-        self.assertEqual(
-            {'r0':()}, f.get_parent_map(['r0']))
-        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
-        self.assertEqual(
-            {'r1':('r0',)}, f.get_parent_map(['r1']))
-        self.assertEqual(
-            {'r0':(),
-             'r1':('r0',)},
-            f.get_parent_map(['r0', 'r1']))
-        f.add_lines('r2', [], ['a\n', 'b\n'])
-        f.add_lines('r3', [], ['a\n', 'b\n'])
-        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
-        self.assertEqual(
-            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
-        self.assertEqual({}, f.get_parent_map('y'))
-        self.assertEqual(
-            {'r0':(),
-             'r1':('r0',)},
-            f.get_parent_map(['r0', 'y', 'r1']))
+        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
+        self.assertEqual(
+            {b'r0': ()}, f.get_parent_map([b'r0']))
+        f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
+        self.assertEqual(
+            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
+        self.assertEqual(
+            {b'r0': (),
+             b'r1': (b'r0',)},
+            f.get_parent_map([b'r0', b'r1']))
+        f.add_lines(b'r2', [], [b'a\n', b'b\n'])
+        f.add_lines(b'r3', [], [b'a\n', b'b\n'])
+        f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
+        self.assertEqual(
+            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
+        self.assertEqual({}, f.get_parent_map(b'y'))
+        self.assertEqual(
+            {b'r0': (),
+             b'r1': (b'r0',)},
+            f.get_parent_map([b'r0', b'y', b'r1']))
 
     def test_annotate(self):
         f = self.get_file()
-        f.add_lines('r0', [], ['a\n', 'b\n'])
-        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
-        origins = f.annotate('r1')
-        self.assertEqual(origins[0][0], 'r1')
-        self.assertEqual(origins[1][0], 'r0')
+        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
+        f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
+        origins = f.annotate(b'r1')
+        self.assertEqual(origins[0][0], b'r1')
+        self.assertEqual(origins[1][0], b'r0')
 
         self.assertRaises(RevisionNotPresent,
-            f.annotate, 'foo')
+                          f.annotate, b'foo')
 
     def test_detection(self):
         # Test weaves detect corruption.
@@ -618,16 +628,16 @@
 
         w = self.get_file_corrupted_text()
 
-        self.assertEqual('hello\n', w.get_text('v1'))
-        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
-        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
+        self.assertEqual(b'hello\n', w.get_text(b'v1'))
+        self.assertRaises(WeaveInvalidChecksum, w.get_text, b'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.get_lines, b'v2')
         self.assertRaises(WeaveInvalidChecksum, w.check)
 
         w = self.get_file_corrupted_checksum()
 
-        self.assertEqual('hello\n', w.get_text('v1'))
-        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
-        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
+        self.assertEqual(b'hello\n', w.get_text(b'v1'))
+        self.assertRaises(WeaveInvalidChecksum, w.get_text, b'v2')
+        self.assertRaises(WeaveInvalidChecksum, w.get_lines, b'v2')
         self.assertRaises(WeaveInvalidChecksum, w.check)
 
     def get_file_corrupted_text(self):
@@ -655,36 +665,37 @@
 
         vf = self.get_file()
         # add a base to get included
-        vf.add_lines('base', [], ['base\n'])
+        vf.add_lines(b'base', [], [b'base\n'])
         # add a ancestor to be included on one side
-        vf.add_lines('lancestor', [], ['lancestor\n'])
+        vf.add_lines(b'lancestor', [], [b'lancestor\n'])
        # add a ancestor to be included on the other side
-        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
+        vf.add_lines(b'rancestor', [b'base'], [b'rancestor\n'])
         # add a child of rancestor with no eofile-nl
-        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
+        vf.add_lines(b'child', [b'rancestor'], [b'base\n', b'child\n'])
         # add a child of lancestor and base to join the two roots
-        vf.add_lines('otherchild',
-                     ['lancestor', 'base'],
-                     ['base\n', 'lancestor\n', 'otherchild\n'])
+        vf.add_lines(b'otherchild',
+                     [b'lancestor', b'base'],
+                     [b'base\n', b'lancestor\n', b'otherchild\n'])
+
         def iter_with_versions(versions, expected):
             # now we need to see what lines are returned, and how often.
             lines = {}
             progress = InstrumentedProgress()
             # iterate over the lines
             for line in vf.iter_lines_added_or_present_in_versions(versions,
-                pb=progress):
+                                                                   pb=progress):
                 lines.setdefault(line, 0)
                 lines[line] += 1
-            if []!= progress.updates:
+            if [] != progress.updates:
                 self.assertEqual(expected, progress.updates)
             return lines
-        lines = iter_with_versions(['child', 'otherchild'],
+        lines = iter_with_versions([b'child', b'otherchild'],
                                    [('Walking content', 0, 2),
                                     ('Walking content', 1, 2),
                                     ('Walking content', 2, 2)])
         # we must see child and otherchild
-        self.assertTrue(lines[('child\n', 'child')] > 0)
-        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
+        self.assertTrue(lines[(b'child\n', b'child')] > 0)
+        self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
         # we dont care if we got more than that.
 
         # test all lines
@@ -695,11 +706,11 @@
                                           ('Walking content', 4, 5),
                                           ('Walking content', 5, 5)])
         # all lines must be seen at least once
-        self.assertTrue(lines[('base\n', 'base')] > 0)
-        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
-        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
-        self.assertTrue(lines[('child\n', 'child')] > 0)
-        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
+        self.assertTrue(lines[(b'base\n', b'base')] > 0)
+        self.assertTrue(lines[(b'lancestor\n', b'lancestor')] > 0)
+        self.assertTrue(lines[(b'rancestor\n', b'rancestor')] > 0)
+        self.assertTrue(lines[(b'child\n', b'child')] > 0)
+        self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
 
     def test_add_lines_with_ghosts(self):
         # some versioned file formats allow lines to be added with parent
@@ -712,32 +723,38 @@
         parent_id_unicode = u'b\xbfse'
         parent_id_utf8 = parent_id_unicode.encode('utf8')
         try:
-            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
+            vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
         except NotImplementedError:
             # check the other ghost apis are also not implemented
-            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
-            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
+            self.assertRaises(NotImplementedError,
+                              vf.get_ancestry_with_ghosts, [b'foo'])
+            self.assertRaises(NotImplementedError,
+                              vf.get_parents_with_ghosts, b'foo')
             return
         vf = self.reopen_file()
         # test key graph related apis: getncestry, _graph, get_parents
         # has_version
         # - these are ghost unaware and must not be reflect ghosts
-        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
+        self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
         self.assertFalse(vf.has_version(parent_id_utf8))
         # we have _with_ghost apis to give us ghost information.
-        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
-        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
+        self.assertEqual([parent_id_utf8, b'notbxbfse'],
+                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
+        self.assertEqual([parent_id_utf8],
+                         vf.get_parents_with_ghosts(b'notbxbfse'))
         # if we add something that is a ghost of another, it should correct the
         # results of the prior apis
         vf.add_lines(parent_id_utf8, [], [])
-        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
-        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
-            vf.get_parent_map(['notbxbfse']))
+        self.assertEqual([parent_id_utf8, b'notbxbfse'],
+                         vf.get_ancestry([b'notbxbfse']))
+        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
+                         vf.get_parent_map([b'notbxbfse']))
         self.assertTrue(vf.has_version(parent_id_utf8))
         # we have _with_ghost apis to give us ghost information.
-        self.assertEqual([parent_id_utf8, 'notbxbfse'],
-            vf.get_ancestry_with_ghosts(['notbxbfse']))
-        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
+        self.assertEqual([parent_id_utf8, b'notbxbfse'],
+                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
+        self.assertEqual([parent_id_utf8],
+                         vf.get_parents_with_ghosts(b'notbxbfse'))
 
     def test_add_lines_with_ghosts_after_normal_revs(self):
         # some versioned file formats allow lines to be added with parent
@@ -747,26 +764,26 @@
         vf = self.get_file()
         # probe for ghost support
         try:
-            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
+            vf.add_lines_with_ghosts(b'base', [], [b'line\n', b'line_b\n'])
         except NotImplementedError:
             return
-        vf.add_lines_with_ghosts('references_ghost',
-                                 ['base', 'a_ghost'],
-                                 ['line\n', 'line_b\n', 'line_c\n'])
-        origins = vf.annotate('references_ghost')
-        self.assertEqual(('base', 'line\n'), origins[0])
-        self.assertEqual(('base', 'line_b\n'), origins[1])
-        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
+        vf.add_lines_with_ghosts(b'references_ghost',
+                                 [b'base', b'a_ghost'],
+                                 [b'line\n', b'line_b\n', b'line_c\n'])
+        origins = vf.annotate(b'references_ghost')
+        self.assertEqual((b'base', b'line\n'), origins[0])
+        self.assertEqual((b'base', b'line_b\n'), origins[1])
+        self.assertEqual((b'references_ghost', b'line_c\n'), origins[2])
 
     def test_readonly_mode(self):
         t = self.get_transport()
         factory = self.get_factory()
         vf = factory('id', t, 0o777, create=True, access_mode='w')
         vf = factory('id', t, access_mode='r')
-        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
+        self.assertRaises(errors.ReadOnlyError, vf.add_lines, b'base', [], [])
         self.assertRaises(errors.ReadOnlyError,
                           vf.add_lines_with_ghosts,
-                          'base',
+                          b'base',
                           [],
                           [])
 
@@ -774,17 +791,17 @@
         # check the sha1 data is available
         vf = self.get_file()
         # a simple file
-        vf.add_lines('a', [], ['a\n'])
+        vf.add_lines(b'a', [], [b'a\n'])
         # the same file, different metadata
-        vf.add_lines('b', ['a'], ['a\n'])
+        vf.add_lines(b'b', [b'a'], [b'a\n'])
         # a file differing only in last newline.
-        vf.add_lines('c', [], ['a'])
+        vf.add_lines(b'c', [], [b'a'])
         self.assertEqual({
-            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
-            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
-            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
+            b'a': b'3f786850e387550fdab836ed7e6dc881de23001b',
+            b'c': b'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
+            b'b': b'3f786850e387550fdab836ed7e6dc881de23001b',
             },
-            vf.get_sha1s(['a', 'c', 'b']))
+            vf.get_sha1s([b'a', b'c', b'b']))
 
 
 class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
@@ -798,36 +815,30 @@
         w = WeaveFile('foo', self.get_transport(),
                       create=True,
                       get_scope=self.get_transaction)
-        w.add_lines('v1', [], ['hello\n'])
-        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
+        w.add_lines(b'v1', [], [b'hello\n'])
+        w.add_lines(b'v2', [b'v1'], [b'hello\n', b'there\n'])
 
         # We are going to invasively corrupt the text
         # Make sure the internals of weave are the same
-        self.assertEqual([('{', 0)
-                        , 'hello\n'
-                        , ('}', None)
-                        , ('{', 1)
-                        , 'there\n'
-                        , ('}', None)
-                        ], w._weave)
+        self.assertEqual([(b'{', 0), b'hello\n', (b'}', None), (b'{', 1), b'there\n', (b'}', None)
+                          ], w._weave)
 
-        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
-                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
-                        ], w._sha1s)
+        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f', b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
+                          ], w._sha1s)
         w.check()
 
         # Corrupted
-        w._weave[4] = 'There\n'
+        w._weave[4] = b'There\n'
         return w
 
     def get_file_corrupted_checksum(self):
         w = self.get_file_corrupted_text()
         # Corrected
-        w._weave[4] = 'there\n'
-        self.assertEqual('hello\nthere\n', w.get_text('v2'))
+        w._weave[4] = b'there\n'
+        self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))
 
-        #Invalid checksum, first digit changed
-        w._sha1s[1] =  'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
+        # Invalid checksum, first digit changed
+        w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w
 
     def reopen_file(self, name='foo', create=False):
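
The corruption fixtures above depend on the weave's internal list layout, which this port also converts to bytes. A compact restatement of what the two fixtures leave behind (values taken from the diff; comments editorial):

    # w._weave interleaves block markers and stored lines:
    #   [(b'{', 0), b'hello\n', (b'}', None), (b'{', 1), b'there\n', (b'}', None)]
    # get_file_corrupted_text changes a stored line without updating its sha1:
    w._weave[4] = b'There\n'
    # get_file_corrupted_checksum restores the line but breaks the digest instead:
    w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'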
@@ -858,48 +869,50 @@
         self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])
 
     def test_add_lines(self):
-        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
-        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
-            ('root', 'a'), [], [])
-        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
-            ('root', 'a:'), None, [])
-        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
-            ('root', 'a:'), [], None)
+        self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
+        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
+                          (b'root', b'a'), [], [])
+        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
+                          (b'root', b'a:'), None, [])
+        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
+                          (b'root', b'a:'), [], None)
 
     def setup_abcde(self):
-        self.vf1.add_lines(('root', 'A'), [], ['a'])
-        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
-        self.vf2.add_lines(('root', 'C'), [], ['c'])
-        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
-        self.plan_merge_vf.add_lines(('root', 'E:'),
-            [('root', 'B'), ('root', 'D')], ['e'])
+        self.vf1.add_lines((b'root', b'A'), [], [b'a'])
+        self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
+        self.vf2.add_lines((b'root', b'C'), [], [b'c'])
+        self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
+        self.plan_merge_vf.add_lines((b'root', b'E:'),
+                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])
 
     def test_get_parents(self):
         self.setup_abcde()
-        self.assertEqual({('root', 'B'):(('root', 'A'),)},
-            self.plan_merge_vf.get_parent_map([('root', 'B')]))
-        self.assertEqual({('root', 'D'):(('root', 'C'),)},
-            self.plan_merge_vf.get_parent_map([('root', 'D')]))
-        self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
-            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
+        self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
+                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
+        self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
+                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
+        self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
+                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
         self.assertEqual({},
-            self.plan_merge_vf.get_parent_map([('root', 'F')]))
+                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
         self.assertEqual({
-                ('root', 'B'): (('root', 'A'),),
-                ('root', 'D'): (('root', 'C'),),
-                ('root', 'E:'): (('root', 'B'), ('root', 'D')),
-                },
+            (b'root', b'B'): ((b'root', b'A'),),
+            (b'root', b'D'): ((b'root', b'C'),),
+            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
+            },
             self.plan_merge_vf.get_parent_map(
-                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))
+                [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))
 
     def test_get_record_stream(self):
         self.setup_abcde()
+
         def get_record(suffix):
             return next(self.plan_merge_vf.get_record_stream(
-                [('root', suffix)], 'unordered', True))
-        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
-        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
-        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
+                [(b'root', suffix)], 'unordered', True))
+        self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
+        self.assertEqual(b'a', b''.join(get_record(b'A').iter_bytes_as('chunked')))
+        self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
+        self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
         self.assertEqual('absent', get_record('F').storage_kind)
 
 
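A pattern that holds across these hunks: record keys and content become bytes, while API selector strings stay native str -- 'unordered', 'fulltext', 'chunked', and the 'absent' storage kind are untouched. A short sketch built from the calls in the hunk above (illustrative):

    # Keys and texts are bytes; orderings and storage kinds are str.
    record = next(self.plan_merge_vf.get_record_stream(
        [(b'root', b'A')], 'unordered', True))
    assert record.get_bytes_as('fulltext') == b'a'   # content is bytes
    assert isinstance(record.storage_kind, str)      # kind stays native str
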
@@ -913,19 +926,19 @@
         vf = self.get_file()
         # try an empty file access
         readonly_vf = self.get_factory()('foo',
-            transport.get_transport_from_url(self.get_readonly_url('.')))
+                                         transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
 
     def test_readonly_http_works_with_feeling(self):
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # now with feeling.
-        vf.add_lines('1', [], ['a\n'])
-        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
+        vf.add_lines(b'1', [], [b'a\n'])
+        vf.add_lines(b'2', [b'1'], [b'b\n', b'a\n'])
         readonly_vf = self.get_factory()('foo',
-            transport.get_transport_from_url(self.get_readonly_url('.')))
-        self.assertEqual(['1', '2'], vf.versions())
-        self.assertEqual(['1', '2'], readonly_vf.versions())
+                                         transport.get_transport_from_url(self.get_readonly_url('.')))
+        self.assertEqual([b'1', b'2'], vf.versions())
+        self.assertEqual([b'1', b'2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
 
@@ -947,17 +960,17 @@
         from textwrap import dedent
 
         def addcrlf(x):
-            return x + '\n'
+            return x + b'\n'
 
         w = self.get_file()
-        w.add_lines('text0', [], list(map(addcrlf, base)))
-        w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
-        w.add_lines('text2', ['text0'], list(map(addcrlf, b)))
+        w.add_lines(b'text0', [], list(map(addcrlf, base)))
+        w.add_lines(b'text1', [b'text0'], list(map(addcrlf, a)))
+        w.add_lines(b'text2', [b'text0'], list(map(addcrlf, b)))
 
         self.log_contents(w)
 
         self.log('merge plan:')
-        p = list(w.plan_merge('text1', 'text2'))
+        p = list(w.plan_merge(b'text1', b'text2'))
         for state, line in p:
             if line:
                 self.log('%12s | %s' % (state, line[:-1]))
@@ -971,54 +984,52 @@
         mp = list(map(addcrlf, mp))
         self.assertEqual(mt.readlines(), mp)
 
-
     def testOneInsert(self):
         self.doMerge([],
-                     ['aa'],
+                     [b'aa'],
                      [],
-                     ['aa'])
+                     [b'aa'])
 
     def testSeparateInserts(self):
-        self.doMerge(['aaa', 'bbb', 'ccc'],
-                     ['aaa', 'xxx', 'bbb', 'ccc'],
-                     ['aaa', 'bbb', 'yyy', 'ccc'],
-                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
+        self.doMerge([b'aaa', b'bbb', b'ccc'],
+                     [b'aaa', b'xxx', b'bbb', b'ccc'],
+                     [b'aaa', b'bbb', b'yyy', b'ccc'],
+                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
 
     def testSameInsert(self):
-        self.doMerge(['aaa', 'bbb', 'ccc'],
-                     ['aaa', 'xxx', 'bbb', 'ccc'],
-                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
-                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
-    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
+        self.doMerge([b'aaa', b'bbb', b'ccc'],
+                     [b'aaa', b'xxx', b'bbb', b'ccc'],
+                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
+                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
+    overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']
+
     def testOverlappedInsert(self):
-        self.doMerge(['aaa', 'bbb'],
-                     ['aaa', 'xxx', 'yyy', 'bbb'],
-                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)
+        self.doMerge([b'aaa', b'bbb'],
+                     [b'aaa', b'xxx', b'yyy', b'bbb'],
+                     [b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)
 
         # really it ought to reduce this to
-        # ['aaa', 'xxx', 'yyy', 'bbb']
-
+        # [b'aaa', b'xxx', b'yyy', b'bbb']
 
     def testClashReplace(self):
-        self.doMerge(['aaa'],
-                     ['xxx'],
-                     ['yyy', 'zzz'],
-                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
-                      '>>>>>>> '])
+        self.doMerge([b'aaa'],
+                     [b'xxx'],
+                     [b'yyy', b'zzz'],
+                     [b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
+                      b'>>>>>>> '])
 
     def testNonClashInsert1(self):
-        self.doMerge(['aaa'],
-                     ['xxx', 'aaa'],
-                     ['yyy', 'zzz'],
-                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
-                      '>>>>>>> '])
+        self.doMerge([b'aaa'],
+                     [b'xxx', b'aaa'],
+                     [b'yyy', b'zzz'],
+                     [b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',
+                      b'>>>>>>> '])
 
     def testNonClashInsert2(self):
-        self.doMerge(['aaa'],
-                     ['aaa'],
-                     ['yyy', 'zzz'],
-                     ['yyy', 'zzz'])
-
+        self.doMerge([b'aaa'],
+                     [b'aaa'],
+                     [b'yyy', b'zzz'],
+                     [b'yyy', b'zzz'])
 
     def testDeleteAndModify(self):
         """Clashing delete and modification.
@@ -1031 +1042 @@
         # skipped, not working yet
         return
 
-        self.doMerge(['aaa', 'bbb', 'ccc'],
-                     ['aaa', 'ddd', 'ccc'],
-                     ['aaa', 'ccc'],
-                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
+        self.doMerge([b'aaa', b'bbb', b'ccc'],
+                     [b'aaa', b'ddd', b'ccc'],
+                     [b'aaa', b'ccc'],
+                     [b'<<<<<<<< ', b'aaa', b'=======', b'>>>>>>> ', b'ccc'])
 
     def _test_merge_from_strings(self, base, a, b, expected):
         w = self.get_file()
-        w.add_lines('text0', [], base.splitlines(True))
-        w.add_lines('text1', ['text0'], a.splitlines(True))
-        w.add_lines('text2', ['text0'], b.splitlines(True))
+        w.add_lines(b'text0', [], base.splitlines(True))
+        w.add_lines(b'text1', [b'text0'], a.splitlines(True))
+        w.add_lines(b'text2', [b'text0'], b.splitlines(True))
         self.log('merge plan:')
-        p = list(w.plan_merge('text1', 'text2'))
+        p = list(w.plan_merge(b'text1', b'text2'))
         for state, line in p:
             if line:
                 self.log('%12s | %s' % (state, line[:-1]))
         self.log('merge result:')
-        result_text = ''.join(w.weave_merge(p))
+        result_text = b''.join(w.weave_merge(p))
         self.log(result_text)
         self.assertEqualDiff(result_text, expected)
 
     def test_weave_merge_conflicts(self):
         # does weave merge properly handle plans that end with unchanged?
-        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
-        self.assertEqual(result, 'hello\n')
+        result = b''.join(self.get_file().weave_merge([('new-a', b'hello\n')]))
+        self.assertEqual(result, b'hello\n')
 
     def test_deletion_extended(self):
         """One side deletes, the other deletes more.
         """
-        base = """\
+        base = b"""\
             line 1
             line 2
             line 3
             """
-        a = """\
+        a = b"""\
             line 1
             line 2
             """
-        b = """\
+        b = b"""\
             line 1
             """
-        result = """\
+        result = b"""\
             line 1
 <<<<<<<\x20
             line 2
@@ -1086 +1097 @@
         Arguably it'd be better to treat these as agreement, rather than
         conflict, but for now conflict is safer.
         """
-        base = """\
+        base = b"""\
             start context
             int a() {}
             int b() {}
             int c() {}
             end context
             """
-        a = """\
+        a = b"""\
             start context
             int a() {}
             end context
             """
-        b = """\
+        b = b"""\
             start context
             int c() {}
             end context
             """
-        result = """\
+        result = b"""\
             start context
 <<<<<<<\x20
             int a() {}
@@ -1116 +1127 @@
 
     def test_agreement_deletion(self):
         """Agree to delete some lines, without conflicts."""
-        base = """\
+        base = b"""\
             start context
             base line 1
             base line 2
             end context
             """
-        a = """\
-            start context
-            base line 1
-            end context
-            """
-        b = """\
-            start context
-            base line 1
-            end context
-            """
-        result = """\
+        a = b"""\
+            start context
+            base line 1
+            end context
+            """
+        b = b"""\
+            start context
+            base line 1
+            end context
+            """
+        result = b"""\
             start context
             base line 1
             end context
@@ -1149 +1160 @@
 
         It's better to consider the whole thing as a disagreement region.
         """
-        base = """\
+        base = b"""\
             start context
             base line 1
             base line 2
             end context
             """
-        a = """\
+        a = b"""\
             start context
             base line 1
             a's replacement line 2
             end context
             """
-        b = """\
+        b = b"""\
             start context
             b replaces
             both lines
             end context
             """
-        result = """\
+        result = b"""\
             start context
 <<<<<<<\x20
             base line 1
@@ -1193 +1204 @@
         write_weave(w, tmpf)
         self.log(tmpf.getvalue())
 
-    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
-                                'xxx', '>>>>>>> ', 'bbb']
+    overlappedInsertExpected = [b'aaa', b'<<<<<<< ', b'xxx', b'yyy', b'=======',
+                                b'xxx', b'>>>>>>> ', b'bbb']
 
 
 class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
@@ -1205 +1216 @@
         # Each is source_kind, requested_kind, adapter class
         scenarios = [
             ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
+            ('knit-delta-gz', 'lines', _mod_knit.DeltaPlainToFullText),
+            ('knit-delta-gz', 'chunked', _mod_knit.DeltaPlainToFullText),
             ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
+            ('knit-ft-gz', 'lines', _mod_knit.FTPlainToFullText),
+            ('knit-ft-gz', 'chunked', _mod_knit.FTPlainToFullText),
             ('knit-annotated-delta-gz', 'knit-delta-gz',
                 _mod_knit.DeltaAnnotatedToUnannotated),
             ('knit-annotated-delta-gz', 'fulltext',
@@ -1214 +1229 @@
                 _mod_knit.FTAnnotatedToUnannotated),
             ('knit-annotated-ft-gz', 'fulltext',
                 _mod_knit.FTAnnotatedToFullText),
+            ('knit-annotated-ft-gz', 'lines',
+                _mod_knit.FTAnnotatedToFullText),
+            ('knit-annotated-ft-gz', 'chunked',
+                _mod_knit.FTAnnotatedToFullText),
             ]
         for source, requested, klass in scenarios:
             adapter_factory = versionedfile.adapter_registry.get(
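The loop above looks each (source storage kind, requested kind) pair up in versionedfile.adapter_registry and instantiates the returned class. A minimal sketch of one such lookup, assuming only the call shapes visible in this test (the None stands in for the backing versioned files, which these particular adapters do not need):

    adapter_factory = versionedfile.adapter_registry.get(
        ('knit-annotated-ft-gz', 'fulltext'))
    adapter = adapter_factory(None)
    # adapter.get_bytes(record, 'fulltext') then returns the plain fulltext,
    # matching the two-argument get_bytes call shape used in helpGetBytes below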
@@ -1226 +1245 @@
         transport = self.get_transport()
         return make_file_factory(annotated, mapper)(transport)
 
-    def helpGetBytes(self, f, ft_adapter, delta_adapter):
+    def helpGetBytes(self, f, ft_name, ft_adapter, delta_name, delta_adapter):
         """Grab the interested adapted texts for tests."""
         # origin is a fulltext
-        entries = f.get_record_stream([('origin',)], 'unordered', False)
+        entries = f.get_record_stream([(b'origin',)], 'unordered', False)
         base = next(entries)
-        ft_data = ft_adapter.get_bytes(base)
+        ft_data = ft_adapter.get_bytes(base, ft_name)
         # merged is both a delta and multiple parents.
-        entries = f.get_record_stream([('merged',)], 'unordered', False)
+        entries = f.get_record_stream([(b'merged',)], 'unordered', False)
         merged = next(entries)
-        delta_data = delta_adapter.get_bytes(merged)
+        delta_data = delta_adapter.get_bytes(merged, delta_name)
         return ft_data, delta_data
 
     def test_deannotation_noeol(self):
@@ -1243 +1262 @@
         # we need a full text, and a delta
         f = self.get_knit()
         get_diamond_files(f, 1, trailing_eol=False)
-        ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToUnannotated(None),
-            _mod_knit.DeltaAnnotatedToUnannotated(None))
+        ft_data, delta_data = self.helpGetBytes(
+            f, 'knit-ft-gz', _mod_knit.FTAnnotatedToUnannotated(None),
+            'knit-delta-gz', _mod_knit.DeltaAnnotatedToUnannotated(None))
         self.assertEqual(
-            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
-            'origin\n'
-            'end origin\n',
+            b'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
+            b'origin\n'
+            b'end origin\n',
             GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
-            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
-            '1,2,3\nleft\nright\nmerged\nend merged\n',
+            b'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
+            b'1,2,3\nleft\nright\nmerged\nend merged\n',
             GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_deannotation(self):
@@ -1261 +1280 @@
         # we need a full text, and a delta
         f = self.get_knit()
         get_diamond_files(f, 1)
-        ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToUnannotated(None),
-            _mod_knit.DeltaAnnotatedToUnannotated(None))
+        ft_data, delta_data = self.helpGetBytes(
+            f, 'knit-ft-gz', _mod_knit.FTAnnotatedToUnannotated(None),
+            'knit-delta-gz', _mod_knit.DeltaAnnotatedToUnannotated(None))
         self.assertEqual(
-            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
-            'origin\n'
-            'end origin\n',
+            b'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
+            b'origin\n'
+            b'end origin\n',
             GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
         self.assertEqual(
-            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
-            '2,2,2\nright\nmerged\nend merged\n',
+            b'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
+            b'2,2,2\nright\nmerged\nend merged\n',
             GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
 
     def test_annotated_to_fulltext_no_eol(self):
@@ -1282 +1301 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToFullText(None),
-            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
-        self.assertEqual('origin', ft_data)
-        self.assertEqual('base\nleft\nright\nmerged', delta_data)
-        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
-            True)], logged_vf.calls)
+        ft_data, delta_data = self.helpGetBytes(
+            f, 'fulltext', _mod_knit.FTAnnotatedToFullText(None),
+            'fulltext', _mod_knit.DeltaAnnotatedToFullText(logged_vf))
+        self.assertEqual(b'origin', ft_data)
+        self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
+        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
+                           True)], logged_vf.calls)
 
     def test_annotated_to_fulltext(self):
         """Test adapting annotated knits to full texts (for -> weaves)."""
@@ -1298 +1317 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToFullText(None),
-            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
-        self.assertEqual('origin\n', ft_data)
-        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
-        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
-            True)], logged_vf.calls)
+        ft_data, delta_data = self.helpGetBytes(
+            f, 'fulltext', _mod_knit.FTAnnotatedToFullText(None),
+            'fulltext', _mod_knit.DeltaAnnotatedToFullText(logged_vf))
+        self.assertEqual(b'origin\n', ft_data)
+        self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
+        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
+                           True)], logged_vf.calls)
 
     def test_unannotated_to_fulltext(self):
         """Test adapting unannotated knits to full texts.
@@ -1317 +1336 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTPlainToFullText(None),
-            _mod_knit.DeltaPlainToFullText(logged_vf))
-        self.assertEqual('origin\n', ft_data)
-        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
-        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
-            True)], logged_vf.calls)
+        ft_data, delta_data = self.helpGetBytes(
+            f, 'fulltext', _mod_knit.FTPlainToFullText(None),
+            'fulltext', _mod_knit.DeltaPlainToFullText(logged_vf))
+        self.assertEqual(b'origin\n', ft_data)
+        self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
+        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
+                           True)], logged_vf.calls)
 
     def test_unannotated_to_fulltext_no_eol(self):
         """Test adapting unannotated knits to full texts.
@@ -1336 +1355 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTPlainToFullText(None),
-            _mod_knit.DeltaPlainToFullText(logged_vf))
-        self.assertEqual('origin', ft_data)
-        self.assertEqual('base\nleft\nright\nmerged', delta_data)
-        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
-            True)], logged_vf.calls)
+        ft_data, delta_data = self.helpGetBytes(
+            f, 'fulltext', _mod_knit.FTPlainToFullText(None),
+            'fulltext', _mod_knit.DeltaPlainToFullText(logged_vf))
+        self.assertEqual(b'origin', ft_data)
+        self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
+        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
+                           True)], logged_vf.calls)
 
 
 class TestKeyMapper(TestCaseWithMemoryTransport):
@@ -1350 +1369 @@
 
     def test_identity_mapper(self):
         mapper = versionedfile.ConstantMapper("inventory")
-        self.assertEqual("inventory", mapper.map(('foo@ar',)))
-        self.assertEqual("inventory", mapper.map(('quux',)))
+        self.assertEqual("inventory", mapper.map((b'foo@ar',)))
+        self.assertEqual("inventory", mapper.map((b'quux',)))
 
     def test_prefix_mapper(self):
         #format5: plain
         mapper = versionedfile.PrefixMapper()
-        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
-        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
-        self.assertEqual(('file-id',), mapper.unmap("file-id"))
-        self.assertEqual(('new-id',), mapper.unmap("new-id"))
+        self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
+        self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
+        self.assertEqual((b'file-id',), mapper.unmap("file-id"))
+        self.assertEqual((b'new-id',), mapper.unmap("new-id"))
 
     def test_hash_prefix_mapper(self):
         #format6: hash + plain
         mapper = versionedfile.HashPrefixMapper()
-        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
-        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
-        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
-        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
+        self.assertEqual(
+            "9b/file-id", mapper.map((b"file-id", b"revision-id")))
+        self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
+        self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
+        self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))
 
     def test_hash_escaped_mapper(self):
         #knit1: hash + escaped
         mapper = versionedfile.HashEscapedPrefixMapper()
-        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
-        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
-            "revision-id")))
-        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
-            "revision-id")))
-        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
-        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
+        self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
+        self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
+                                                           b"revision-id")))
+        self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
+                                                          b"revision-id")))
+        self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
+        self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
 
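Taken together, these assertions pin down the mapper contract: map() turns a key tuple into a relative path (constant, prefix-based, or hash-and-escape based), and unmap() recovers only the file-id part. A compact restatement using values asserted above:

    mapper = versionedfile.HashPrefixMapper()
    path = mapper.map((b"file-id", b"revision-id"))  # -> "9b/file-id"
    assert mapper.unmap(path) == (b"file-id",)       # revision part not recovered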
@@ -1383 +1403 @@
 
 class TestVersionedFiles(TestCaseWithMemoryTransport):
@@ -1398 +1418 @@
         ('weave-named', {
             'cleanup': None,
             'factory': make_versioned_files_factory(WeaveFile,
-                ConstantMapper('inventory')),
+                                                    ConstantMapper('inventory')),
             'graph': True,
             'key_length': 1,
             'support_partial_insertion': False,
@@ -1443 +1463 @@
         ('weave-prefix', {
             'cleanup': None,
             'factory': make_versioned_files_factory(WeaveFile,
-                PrefixMapper()),
+                                                    PrefixMapper()),
             'graph': True,
             'key_length': 2,
             'support_partial_insertion': False,
@@ -1487 +1507 @@
         if self.key_length == 1:
             return (suffix,)
         else:
-            return ('FileA',) + (suffix,)
+            return (b'FileA',) + (suffix,)
 
     def test_add_fallback_implies_without_fallbacks(self):
         f = self.get_versionedfiles('files')
@@ -1495 +1515 @@
             raise TestNotApplicable("%s doesn't support fallbacks"
                                     % (f.__class__.__name__,))
         g = self.get_versionedfiles('fallback')
-        key_a = self.get_simple_key('a')
-        g.add_lines(key_a, [], ['\n'])
+        key_a = self.get_simple_key(b'a')
+        g.add_lines(key_a, [], [b'\n'])
         f.add_fallback_versioned_files(g)
         self.assertTrue(key_a in f.get_parent_map([key_a]))
-        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
+        self.assertFalse(
+            key_a in f.without_fallbacks().get_parent_map([key_a]))
 
     def test_add_lines(self):
         f = self.get_versionedfiles()
-        key0 = self.get_simple_key('r0')
-        key1 = self.get_simple_key('r1')
-        key2 = self.get_simple_key('r2')
-        keyf = self.get_simple_key('foo')
-        f.add_lines(key0, [], ['a\n', 'b\n'])
+        key0 = self.get_simple_key(b'r0')
+        key1 = self.get_simple_key(b'r1')
+        key2 = self.get_simple_key(b'r2')
+        keyf = self.get_simple_key(b'foo')
+        f.add_lines(key0, [], [b'a\n', b'b\n'])
         if self.graph:
-            f.add_lines(key1, [key0], ['b\n', 'c\n'])
+            f.add_lines(key1, [key0], [b'b\n', b'c\n'])
         else:
-            f.add_lines(key1, [], ['b\n', 'c\n'])
+            f.add_lines(key1, [], [b'b\n', b'c\n'])
         keys = f.keys()
         self.assertTrue(key0 in keys)
         self.assertTrue(key1 in keys)
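The fallback test above encodes the stacking contract: get_parent_map returns entries only for keys it can find, so a key stored just in the fallback is visible through the stacked object but disappears once without_fallbacks() strips the fallback. In sketch form, with the names used in the test:

    g.add_lines(key_a, [], [b'\n'])       # key_a exists only in fallback g
    f.add_fallback_versioned_files(g)
    key_a in f.get_parent_map([key_a])                      # True, found via g
    key_a in f.without_fallbacks().get_parent_map([key_a])  # False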
@@ -1519 +1540 @@
         for record in f.get_record_stream([key0, key1], 'unordered', True):
             records.append((record.key, record.get_bytes_as('fulltext')))
         records.sort()
-        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
+        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
+
+    def test_add_chunks(self):
+        f = self.get_versionedfiles()
+        key0 = self.get_simple_key(b'r0')
+        key1 = self.get_simple_key(b'r1')
+        key2 = self.get_simple_key(b'r2')
+        keyf = self.get_simple_key(b'foo')
+
+        def add_chunks(key, parents, chunks):
+            factory = ChunkedContentFactory(
+                key, parents, osutils.sha_strings(chunks), chunks)
+            return f.add_content(factory)
+
+        add_chunks(key0, [], [b'a', b'\nb\n'])
+        if self.graph:
+            add_chunks(key1, [key0], [b'b', b'\n', b'c\n'])
+        else:
+            add_chunks(key1, [], [b'b\n', b'c\n'])
+        keys = f.keys()
+        self.assertIn(key0, keys)
+        self.assertIn(key1, keys)
+        records = []
+        for record in f.get_record_stream([key0, key1], 'unordered', True):
+            records.append((record.key, record.get_bytes_as('fulltext')))
+        records.sort()
+        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
 
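test_add_chunks is new in this revision: it feeds text to add_content through a ChunkedContentFactory rather than a list of lines. The point is that chunk boundaries need not coincide with line boundaries; a minimal sketch using only the constructor arguments visible above:

    chunks = [b'a', b'\nb\n']             # same bytes as lines [b'a\n', b'b\n']
    factory = ChunkedContentFactory(
        key0, [], osutils.sha_strings(chunks), chunks)
    f.add_content(factory)                # stored fulltext is b'a\nb\n'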
@@ -1524 +1570 @@
     def test_annotate(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
         if self.key_length == 1:
             prefix = ()
         else:
-            prefix = ('FileA',)
+            prefix = (b'FileA',)
         # introduced full text
-        origins = files.annotate(prefix + ('origin',))
+        origins = files.annotate(prefix + (b'origin',))
         self.assertEqual([
-            (prefix + ('origin',), 'origin\n')],
+            (prefix + (b'origin',), b'origin\n')],
             origins)
         # a delta
-        origins = files.annotate(prefix + ('base',))
+        origins = files.annotate(prefix + (b'base',))
         self.assertEqual([
-            (prefix + ('base',), 'base\n')],
+            (prefix + (b'base',), b'base\n')],
             origins)
         # a merge
-        origins = files.annotate(prefix + ('merged',))
+        origins = files.annotate(prefix + (b'merged',))
         if self.graph:
             self.assertEqual([
-                (prefix + ('base',), 'base\n'),
-                (prefix + ('left',), 'left\n'),
-                (prefix + ('right',), 'right\n'),
-                (prefix + ('merged',), 'merged\n')
+                (prefix + (b'base',), b'base\n'),
+                (prefix + (b'left',), b'left\n'),
+                (prefix + (b'right',), b'right\n'),
+                (prefix + (b'merged',), b'merged\n')
                 ],
                 origins)
         else:
             # Without a graph everything is new.
             self.assertEqual([
-                (prefix + ('merged',), 'base\n'),
-                (prefix + ('merged',), 'left\n'),
-                (prefix + ('merged',), 'right\n'),
-                (prefix + ('merged',), 'merged\n')
+                (prefix + (b'merged',), b'base\n'),
+                (prefix + (b'merged',), b'left\n'),
+                (prefix + (b'merged',), b'right\n'),
+                (prefix + (b'merged',), b'merged\n')
                 ],
                 origins)
         self.assertRaises(RevisionNotPresent,
-            files.annotate, prefix + ('missing-key',))
+                          files.annotate, prefix + ('missing-key',))
 
     def test_check_no_parameters(self):
         files = self.get_versionedfiles()
@@ -1578 +1624 @@
         seen = set()
         # Texts output should be fulltexts.
         self.capture_stream(files, entries, seen.add,
-            files.get_parent_map(keys), require_fulltext=True)
+                            files.get_parent_map(keys), require_fulltext=True)
         # All texts should be output.
         self.assertEqual(set(keys), seen)
 
@@ -1591 +1637 @@
         files = self.get_versionedfiles()
 
     def get_diamond_files(self, files, trailing_eol=True, left_only=False,
-        nokeys=False):
+                          nokeys=False):
         return get_diamond_files(files, self.key_length,
-            trailing_eol=trailing_eol, nograph=not self.graph,
-            left_only=left_only, nokeys=nokeys)
+                                 trailing_eol=trailing_eol, nograph=not self.graph,
+                                 left_only=left_only, nokeys=nokeys)
 
     def _add_content_nostoresha(self, add_lines):
         """When nostore_sha is supplied using old content raises."""
         vf = self.get_versionedfiles()
-        empty_text = ('a', [])
-        sample_text_nl = ('b', ["foo\n", "bar\n"])
-        sample_text_no_nl = ('c', ["foo\n", "bar"])
+        empty_text = (b'a', [])
+        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
+        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
         shas = []
         for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
             if add_lines:
@@ -1613 +1659 @@
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
-            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
-            new_key = self.get_simple_key(version + "2")
-            self.assertRaises(errors.ExistingContent,
-                vf.add_lines, new_key, [], lines,
-                nostore_sha=sha)
-            self.assertRaises(errors.ExistingContent,
-                vf.add_lines, new_key, [], lines,
-                nostore_sha=sha)
+                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+            new_key = self.get_simple_key(version + b"2")
+            self.assertRaises(errors.ExistingContent,
+                              vf.add_lines, new_key, [], lines,
+                              nostore_sha=sha)
+            self.assertRaises(errors.ExistingContent,
+                              vf.add_lines, new_key, [], lines,
+                              nostore_sha=sha)
             # and no new version should have been added.
             record = next(vf.get_record_stream([new_key], 'unordered', True))
             self.assertEqual('absent', record.storage_kind)
@@ -1639 +1685 @@
             results.append(add[:2])
         if self.key_length == 1:
             self.assertEqual([
-                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
-                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
-                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
-                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
-                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
         elif self.key_length == 2:
             self.assertEqual([
-                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
-                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
-                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
-                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
-                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
-                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
-                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
-                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
-                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
-                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
+                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
 
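The add[:2] slices above imply that add_lines returns a tuple whose first two items are the SHA-1 of the stored text and its length in bytes; the tests compare only that prefix. A one-line sketch, with values taken from the expected list above:

    sha1, length = files.add_lines(key, parents, [b'origin\n'])[:2]
    # -> (b'00e364d235126be43292ab09cb4686cf703ddc17', 7) for b'origin\n'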
@@ -1662 +1708 @@
    def test_add_lines_no_key_generates_chk_key(self):
@@ -1670 +1716 @@
             results.append(add[:2])
         if self.key_length == 1:
             self.assertEqual([
-                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
-                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
-                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
-                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
-                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
             self.assertEqual({
-                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
-                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
-                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
-                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
-                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
+                (b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
+                (b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
+                (b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
+                (b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
+                (b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                 },
                 files.keys())
         elif self.key_length == 2:
             self.assertEqual([
-                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
-                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
-                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
-                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
-                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
-                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
-                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
-                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
-                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
-                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
+                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
+                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
+                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
+                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
+                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
+                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                 results)
             # Check the added items got CHK keys.
             self.assertEqual({
-                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
-                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
-                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
-                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
-                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
-                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
-                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
-                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
-                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
-                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
+                (b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
+                (b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
+                (b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
+                (b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
+                (b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
+                (b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
+                (b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
+                (b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
+                (b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
+                (b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                 },
                 files.keys())
 
     def test_empty_lines(self):
         """Empty files can be stored."""
         f = self.get_versionedfiles()
-        key_a = self.get_simple_key('a')
+        key_a = self.get_simple_key(b'a')
         f.add_lines(key_a, [], [])
-        self.assertEqual('',
-            f.get_record_stream([key_a], 'unordered', True
-                ).next().get_bytes_as('fulltext'))
-        key_b = self.get_simple_key('b')
+        self.assertEqual(b'',
+                         next(f.get_record_stream([key_a], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
+        key_b = self.get_simple_key(b'b')
         f.add_lines(key_b, self.get_parents([key_a]), [])
-        self.assertEqual('',
-            f.get_record_stream([key_b], 'unordered', True
-                ).next().get_bytes_as('fulltext'))
+        self.assertEqual(b'',
+                         next(f.get_record_stream([key_b], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
 
     def test_newline_only(self):
         f = self.get_versionedfiles()
-        key_a = self.get_simple_key('a')
-        f.add_lines(key_a, [], ['\n'])
-        self.assertEqual('\n',
-            f.get_record_stream([key_a], 'unordered', True
-                ).next().get_bytes_as('fulltext'))
-        key_b = self.get_simple_key('b')
-        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
-        self.assertEqual('\n',
-            f.get_record_stream([key_b], 'unordered', True
-                ).next().get_bytes_as('fulltext'))
+        key_a = self.get_simple_key(b'a')
+        f.add_lines(key_a, [], [b'\n'])
+        self.assertEqual(b'\n',
+                         next(f.get_record_stream([key_a], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
+        key_b = self.get_simple_key(b'b')
+        f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
+        self.assertEqual(b'\n',
+                         next(f.get_record_stream([key_b], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
 
     def test_get_known_graph_ancestry(self):
         f = self.get_versionedfiles()
         if not self.graph:
             raise TestNotApplicable('ancestry info only relevant with graph.')
-        key_a = self.get_simple_key('a')
-        key_b = self.get_simple_key('b')
-        key_c = self.get_simple_key('c')
+        key_a = self.get_simple_key(b'a')
+        key_b = self.get_simple_key(b'b')
+        key_c = self.get_simple_key(b'c')
         # A
         # |\
         # | B
         # |/
         # C
-        f.add_lines(key_a, [], ['\n'])
-        f.add_lines(key_b, [key_a], ['\n'])
-        f.add_lines(key_c, [key_a, key_b], ['\n'])
+        f.add_lines(key_a, [], [b'\n'])
+        f.add_lines(key_b, [key_a], [b'\n'])
+        f.add_lines(key_c, [key_a, key_b], [b'\n'])
         kg = f.get_known_graph_ancestry([key_c])
         self.assertIsInstance(kg, _mod_graph.KnownGraph)
         self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
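The same ancestry can be written directly as a parent map. Assuming _mod_graph.KnownGraph accepts a {key: parent_keys} dict (which is how the object returned here behaves), the assertion above is equivalent to:

    kg = _mod_graph.KnownGraph(
        {key_a: (), key_b: (key_a,), key_c: (key_a, key_b)})
    assert list(kg.topo_sort()) == [key_a, key_b, key_c]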
@@ -1766 +1812 @@
         if getattr(f, 'add_fallback_versioned_files', None) is None:
             raise TestNotApplicable("%s doesn't support fallbacks"
                                     % (f.__class__.__name__,))
-        key_a = self.get_simple_key('a')
-        key_b = self.get_simple_key('b')
-        key_c = self.get_simple_key('c')
+        key_a = self.get_simple_key(b'a')
+        key_b = self.get_simple_key(b'b')
+        key_c = self.get_simple_key(b'c')
         # A     only in fallback
         # |\
         # | B
         # |/
         # C
         g = self.get_versionedfiles('fallback')
-        g.add_lines(key_a, [], ['\n'])
+        g.add_lines(key_a, [], [b'\n'])
         f.add_fallback_versioned_files(g)
-        f.add_lines(key_b, [key_a], ['\n'])
-        f.add_lines(key_c, [key_a, key_b], ['\n'])
+        f.add_lines(key_b, [key_a], [b'\n'])
+        f.add_lines(key_c, [key_a, key_b], [b'\n'])
         kg = f.get_known_graph_ancestry([key_c])
         self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
 
@@ -1791 +1837 @@
     def assertValidStorageKind(self, storage_kind):
         """Assert that storage_kind is a valid storage_kind."""
         self.assertSubset([storage_kind],
-            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
-             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
-             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
-             'knit-delta-gz',
-             'knit-delta-closure', 'knit-delta-closure-ref',
-             'groupcompress-block', 'groupcompress-block-ref'])
+                          ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
+                           'knit-ft', 'knit-delta', 'chunked', 'fulltext',
+                           'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
+                           'knit-delta-gz',
+                           'knit-delta-closure', 'knit-delta-closure-ref',
+                           'groupcompress-block', 'groupcompress-block-ref'])
 
     def capture_stream(self, f, entries, on_seen, parents,
-        require_fulltext=False):
+                       require_fulltext=False):
         """Capture a stream for testing."""
         for factory in entries:
             on_seen(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
             if factory.sha1 is not None:
                 self.assertEqual(f.get_sha1s([factory.key])[factory.key],
-                    factory.sha1)
+                                 factory.sha1)
             self.assertEqual(parents[factory.key], factory.parents)
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
-                str)
+                                  bytes)
             if require_fulltext:
                 factory.get_bytes_as('fulltext')
 
@@ -1827 +1873 @@
     def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
         if self.key_length == 1:
-            keys = [('merged',), ('left',), ('right',), ('base',)]
-            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
+            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
+            sort_order = {(b'merged',): 2, (b'left',): 1,
+                          (b'right',): 1, (b'base',): 0}
         else:
             keys = [
-                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
-                ('FileA', 'base'),
-                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
-                ('FileB', 'base'),
+                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
+                (b'FileA', b'base'),
+                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
+                (b'FileB', b'base'),
                 ]
             sort_order = {
-                ('FileA', 'merged'): 2, ('FileA', 'left'): 1, ('FileA', 'right'): 1,
-                ('FileA', 'base'): 0,
-                ('FileB', 'merged'): 2, ('FileB', 'left'): 1, ('FileB', 'right'): 1,
-                ('FileB', 'base'): 0,
+                (b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
+                (b'FileA', b'base'): 0,
+                (b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
+                (b'FileB', b'base'): 0,
                 }
         return keys, sort_order
 
     def get_keys_and_groupcompress_sort_order(self):
         """Get diamond test keys list, and their groupcompress sort ordering."""
         if self.key_length == 1:
-            keys = [('merged',), ('left',), ('right',), ('base',)]
-            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
+            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
+            sort_order = {(b'merged',): 0, (b'left',): 1,
+                          (b'right',): 1, (b'base',): 2}
         else:
             keys = [
-                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
-                ('FileA', 'base'),
-                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
-                ('FileB', 'base'),
+                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
+                (b'FileA', b'base'),
+                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
+                (b'FileB', b'base'),
                 ]
             sort_order = {
-                ('FileA', 'merged'): 0, ('FileA', 'left'): 1, ('FileA', 'right'): 1,
-                ('FileA', 'base'): 2,
-                ('FileB', 'merged'): 3, ('FileB', 'left'): 4, ('FileB', 'right'): 4,
-                ('FileB', 'base'): 5,
+                (b'FileA', b'merged'): 0, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
+                (b'FileA', b'base'): 2,
+                (b'FileB', b'merged'): 3, (b'FileB', b'left'): 4, (b'FileB', b'right'): 4,
+                (b'FileB', b'base'): 5,
                 }
         return keys, sort_order
 
@@ -1887 +1935 @@
             seen.append(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
             self.assertSubset([factory.sha1],
-                [None, files.get_sha1s([factory.key])[factory.key]])
+                              [None, files.get_sha1s([factory.key])[factory.key]])
             self.assertEqual(parent_map[factory.key], factory.parents)
             # self.assertEqual(files.get_text(factory.key),
             ft_bytes = factory.get_bytes_as('fulltext')
-            self.assertIsInstance(ft_bytes, str)
+            self.assertIsInstance(ft_bytes, bytes)
             chunked_bytes = factory.get_bytes_as('chunked')
-            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
+            self.assertEqualDiff(ft_bytes, b''.join(chunked_bytes))
+            chunked_bytes = factory.iter_bytes_as('chunked')
+            self.assertEqualDiff(ft_bytes, b''.join(chunked_bytes))
 
         self.assertStreamOrder(sort_order, seen, keys)
 
@@ -1911 +1961 @@
     def assertStreamOrder(self, sort_order, seen, keys):
         self.assertEqual(len(set(seen)), len(keys))
         if self.key_length == 1:
-            lows = {():0}
+            lows = {(): 0}
         else:
-            lows = {('FileA',):0, ('FileB',):0}
+            lows = {(b'FileA',): 0, (b'FileB',): 0}
         if not self.graph:
             self.assertEqual(set(keys), set(seen))
         else:
             for key in seen:
                 sort_pos = sort_order[key]
                 self.assertTrue(sort_pos >= lows[key[:-1]],
-                    "Out of order in sorted stream: %r, %r" % (key, seen))
+                                "Out of order in sorted stream: %r, %r" % (key, seen))
                 lows[key[:-1]] = sort_pos
 
     def test_get_record_stream_unknown_storage_kind_raises(self):
@@ -1928 +1978 @@
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
         if self.key_length == 1:
-            keys = [('merged',), ('left',), ('right',), ('base',)]
+            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
         else:
             keys = [
-                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
-                ('FileA', 'base'),
-                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
-                ('FileB', 'base'),
+                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
+                (b'FileA', b'base'),
+                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
+                (b'FileB', b'base'),
                 ]
         parent_map = files.get_parent_map(keys)
         entries = files.get_record_stream(keys, 'unordered', False)
@@ -1950 +2000 @@
             self.assertEqual(parent_map[factory.key], factory.parents)
             # currently no stream emits mpdiff
             self.assertRaises(errors.UnavailableRepresentation,
-                factory.get_bytes_as, 'mpdiff')
+                              factory.get_bytes_as, 'mpdiff')
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
-                str)
+                                  bytes)
         self.assertEqual(set(keys), seen)
 
     def test_get_record_stream_missing_records_are_absent(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
         if self.key_length == 1:
-            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
+            keys = [(b'merged',), (b'left',), (b'right',),
+                    (b'absent',), (b'base',)]
         else:
             keys = [
-                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
-                ('FileA', 'absent'), ('FileA', 'base'),
-                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
-                ('FileB', 'absent'), ('FileB', 'base'),
-                ('absent', 'absent'),
+                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
+                (b'FileA', b'absent'), (b'FileA', b'base'),
+                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
+                (b'FileB', b'absent'), (b'FileB', b'base'),
+                (b'absent', b'absent'),
                 ]
         parent_map = files.get_parent_map(keys)
         entries = files.get_record_stream(keys, 'unordered', False)
2028
1977
    def assertRecordHasContent(self, record, bytes):
2029
1978
        """Assert that record has the bytes bytes."""
2030
1979
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
2031
 
        self.assertEqual(bytes, b''.join(record.get_bytes_as('chunked')))
 
1980
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
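
Both in-memory representations carry identical content: 'fulltext' is a single byte string, 'chunked' a list of chunks that joins back to it. A plain-dict stand-in (hypothetical, not the real record API):

    record = {'fulltext': b'my text\ncontent',
              'chunked': [b'my text\n', b'content']}
    assert b''.join(record['chunked']) == record['fulltext']
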
2032
1981
 
2033
1982
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
2034
1983
        files = self.get_versionedfiles()
2035
 
        key = self.get_simple_key(b'foo')
2036
 
        files.add_lines(key, (), [b'my text\n', b'content'])
 
1984
        key = self.get_simple_key('foo')
 
1985
        files.add_lines(key, (), ['my text\n', 'content'])
2037
1986
        stream = files.get_record_stream([key], 'unordered', False)
2038
1987
        record = next(stream)
2039
1988
        if record.storage_kind in ('chunked', 'fulltext'):
2040
1989
            # chunked and fulltext representations are for direct use, not wire
2041
1990
            # serialisation: check they can be used directly. To send such
2042
1991
            # records over the wire, translation will be needed.
2043
 
            self.assertRecordHasContent(record, b"my text\ncontent")
 
1992
            self.assertRecordHasContent(record, "my text\ncontent")
2044
1993
        else:
2045
1994
            bytes = [record.get_bytes_as(record.storage_kind)]
2046
1995
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
2049
1998
            for record in network_stream:
2050
1999
                records.append(record)
2051
2000
                self.assertEqual(source_record.storage_kind,
2052
 
                                 record.storage_kind)
 
2001
                    record.storage_kind)
2053
2002
                self.assertEqual(source_record.parents, record.parents)
2054
2003
                self.assertEqual(
2055
2004
                    source_record.get_bytes_as(source_record.storage_kind),
2072
2021
            yield record
2073
2022
 
2074
2023
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
2075
 
                                        stream):
 
2024
        stream):
2076
2025
        """Convert a stream to a bytes iterator.
2077
2026
 
2078
2027
        :param skipped_records: A list with one element to increment when a
2093
2042
    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
2094
2043
        files = self.get_versionedfiles()
2095
2044
        target_files = self.get_versionedfiles('target')
2096
 
        key = self.get_simple_key(b'ft')
2097
 
        key_delta = self.get_simple_key(b'delta')
2098
 
        files.add_lines(key, (), [b'my text\n', b'content'])
 
2045
        key = self.get_simple_key('ft')
 
2046
        key_delta = self.get_simple_key('delta')
 
2047
        files.add_lines(key, (), ['my text\n', 'content'])
2099
2048
        if self.graph:
2100
2049
            delta_parents = (key,)
2101
2050
        else:
2102
2051
            delta_parents = ()
2103
 
        files.add_lines(key_delta, delta_parents, [
2104
 
                        b'different\n', b'content\n'])
 
2052
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
2105
2053
        local = files.get_record_stream([key, key_delta], 'unordered', False)
2106
2054
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
2107
2055
        skipped_records = [0]
2108
2056
        full_texts = {
2109
 
            key: b"my text\ncontent",
2110
 
            key_delta: b"different\ncontent\n",
 
2057
            key: "my text\ncontent",
 
2058
            key_delta: "different\ncontent\n",
2111
2059
            }
2112
2060
        byte_stream = self.stream_to_bytes_or_skip_counter(
2113
2061
            skipped_records, full_texts, local)
2128
2076
        # copy a delta over the wire
2129
2077
        files = self.get_versionedfiles()
2130
2078
        target_files = self.get_versionedfiles('target')
2131
 
        key = self.get_simple_key(b'ft')
2132
 
        key_delta = self.get_simple_key(b'delta')
2133
 
        files.add_lines(key, (), [b'my text\n', b'content'])
 
2079
        key = self.get_simple_key('ft')
 
2080
        key_delta = self.get_simple_key('delta')
 
2081
        files.add_lines(key, (), ['my text\n', 'content'])
2134
2082
        if self.graph:
2135
2083
            delta_parents = (key,)
2136
2084
        else:
2137
2085
            delta_parents = ()
2138
 
        files.add_lines(key_delta, delta_parents, [
2139
 
                        b'different\n', b'content\n'])
 
2086
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
2140
2087
        # Copy the basis text across so we can reconstruct the delta during
2141
2088
        # insertion into target.
2142
2089
        target_files.insert_record_stream(files.get_record_stream([key],
2143
 
                                                                  'unordered', False))
 
2090
            'unordered', False))
2144
2091
        local = files.get_record_stream([key_delta], 'unordered', False)
2145
2092
        ref = files.get_record_stream([key_delta], 'unordered', False)
2146
2093
        skipped_records = [0]
2147
2094
        full_texts = {
2148
 
            key_delta: b"different\ncontent\n",
 
2095
            key_delta: "different\ncontent\n",
2149
2096
            }
2150
2097
        byte_stream = self.stream_to_bytes_or_skip_counter(
2151
2098
            skipped_records, full_texts, local)
2165
2112
    def test_get_record_stream_wire_ready_delta_closure_included(self):
2166
2113
        # copy a delta over the wire with the ability to get its full text.
2167
2114
        files = self.get_versionedfiles()
2168
 
        key = self.get_simple_key(b'ft')
2169
 
        key_delta = self.get_simple_key(b'delta')
2170
 
        files.add_lines(key, (), [b'my text\n', b'content'])
 
2115
        key = self.get_simple_key('ft')
 
2116
        key_delta = self.get_simple_key('delta')
 
2117
        files.add_lines(key, (), ['my text\n', 'content'])
2171
2118
        if self.graph:
2172
2119
            delta_parents = (key,)
2173
2120
        else:
2174
2121
            delta_parents = ()
2175
 
        files.add_lines(key_delta, delta_parents, [
2176
 
                        b'different\n', b'content\n'])
 
2122
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
2177
2123
        local = files.get_record_stream([key_delta], 'unordered', True)
2178
2124
        ref = files.get_record_stream([key_delta], 'unordered', True)
2179
2125
        skipped_records = [0]
2180
2126
        full_texts = {
2181
 
            key_delta: b"different\ncontent\n",
 
2127
            key_delta: "different\ncontent\n",
2182
2128
            }
2183
2129
        byte_stream = self.stream_to_bytes_or_skip_counter(
2184
2130
            skipped_records, full_texts, local)
2198
2144
        seen = set()
2199
2145
        for factory in entries:
2200
2146
            seen.add(factory.key)
2201
 
            if factory.key[-1] == b'absent':
 
2147
            if factory.key[-1] == 'absent':
2202
2148
                self.assertEqual('absent', factory.storage_kind)
2203
2149
                self.assertEqual(None, factory.sha1)
2204
2150
                self.assertEqual(None, factory.parents)
2209
2155
                    self.assertEqual(sha1, factory.sha1)
2210
2156
                self.assertEqual(parents[factory.key], factory.parents)
2211
2157
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
2212
 
                                      bytes)
 
2158
                    str)
2213
2159
        self.assertEqual(set(keys), seen)
2214
2160
 
2215
2161
    def test_filter_absent_records(self):
2223
2169
        # absent keys is still delivered).
2224
2170
        present_keys = list(keys)
2225
2171
        if self.key_length == 1:
2226
 
            keys.insert(2, (b'extra',))
 
2172
            keys.insert(2, ('extra',))
2227
2173
        else:
2228
 
            keys.insert(2, (b'extra', b'extra'))
 
2174
            keys.insert(2, ('extra', 'extra'))
2229
2175
        entries = files.get_record_stream(keys, 'unordered', False)
2230
2176
        seen = set()
2231
2177
        self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
2232
 
                            parent_map)
 
2178
            parent_map)
2233
2179
        self.assertEqual(set(present_keys), seen)
2234
2180
 
2235
2181
    def get_mapper(self):
2249
2195
    def test_get_annotator(self):
2250
2196
        files = self.get_versionedfiles()
2251
2197
        self.get_diamond_files(files)
2252
 
        origin_key = self.get_simple_key(b'origin')
2253
 
        base_key = self.get_simple_key(b'base')
2254
 
        left_key = self.get_simple_key(b'left')
2255
 
        right_key = self.get_simple_key(b'right')
2256
 
        merged_key = self.get_simple_key(b'merged')
 
2198
        origin_key = self.get_simple_key('origin')
 
2199
        base_key = self.get_simple_key('base')
 
2200
        left_key = self.get_simple_key('left')
 
2201
        right_key = self.get_simple_key('right')
 
2202
        merged_key = self.get_simple_key('merged')
2257
2203
        # annotator = files.get_annotator()
2258
2204
        # introduced full text
2259
2205
        origins, lines = files.get_annotator().annotate(origin_key)
2260
2206
        self.assertEqual([(origin_key,)], origins)
2261
 
        self.assertEqual([b'origin\n'], lines)
 
2207
        self.assertEqual(['origin\n'], lines)
2262
2208
        # a delta
2263
2209
        origins, lines = files.get_annotator().annotate(base_key)
2264
2210
        self.assertEqual([(base_key,)], origins)
2280
2226
                (merged_key,),
2281
2227
                ], origins)
2282
2228
        self.assertRaises(RevisionNotPresent,
2283
 
                          files.get_annotator().annotate, self.get_simple_key(b'missing-key'))
 
2229
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
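
For reference, annotate() returns a parallel pair (origins, lines), where origins[i] is the tuple of keys the i-th line is attributed to. For the diamond fixture's 'left' text the shape is as follows (literal values inferred from the fixture, shown in the string-key spelling):

    origins = [(('base',),), (('left',),)]
    lines = ['base\n', 'left\n']
    assert len(origins) == len(lines)
    assert origins[1] == (('left',),)  # the line 'left' itself introduced
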
2284
2230
 
2285
2231
    def test_get_parent_map(self):
2286
2232
        files = self.get_versionedfiles()
2287
2233
        if self.key_length == 1:
2288
2234
            parent_details = [
2289
 
                ((b'r0',), self.get_parents(())),
2290
 
                ((b'r1',), self.get_parents(((b'r0',),))),
2291
 
                ((b'r2',), self.get_parents(())),
2292
 
                ((b'r3',), self.get_parents(())),
2293
 
                ((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
 
2235
                (('r0',), self.get_parents(())),
 
2236
                (('r1',), self.get_parents((('r0',),))),
 
2237
                (('r2',), self.get_parents(())),
 
2238
                (('r3',), self.get_parents(())),
 
2239
                (('m',), self.get_parents((('r0',), ('r1',), ('r2',), ('r3',)))),
2294
2240
                ]
2295
2241
        else:
2296
2242
            parent_details = [
2297
 
                ((b'FileA', b'r0'), self.get_parents(())),
2298
 
                ((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
2299
 
                ((b'FileA', b'r2'), self.get_parents(())),
2300
 
                ((b'FileA', b'r3'), self.get_parents(())),
2301
 
                ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
2302
 
                                                     (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
 
2243
                (('FileA', 'r0'), self.get_parents(())),
 
2244
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
 
2245
                (('FileA', 'r2'), self.get_parents(())),
 
2246
                (('FileA', 'r3'), self.get_parents(())),
 
2247
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
 
2248
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
2303
2249
                ]
2304
2250
        for key, parents in parent_details:
2305
2251
            files.add_lines(key, parents, [])
2306
2252
            # immediately after adding, it should be queryable.
2307
 
            self.assertEqual({key: parents}, files.get_parent_map([key]))
 
2253
            self.assertEqual({key:parents}, files.get_parent_map([key]))
2308
2254
        # We can ask for an empty set
2309
2255
        self.assertEqual({}, files.get_parent_map([]))
2310
2256
        # We can ask for many keys
2311
2257
        all_parents = dict(parent_details)
2312
2258
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
2313
2259
        # Absent keys are just not included in the result.
2314
 
        keys = list(all_parents.keys())
 
2260
        keys = all_parents.keys()
2315
2261
        if self.key_length == 1:
2316
 
            keys.insert(1, (b'missing',))
 
2262
            keys.insert(1, ('missing',))
2317
2263
        else:
2318
 
            keys.insert(1, (b'missing', b'missing'))
 
2264
            keys.insert(1, ('missing', 'missing'))
2319
2265
        # Absent keys are just ignored
2320
2266
        self.assertEqual(all_parents, files.get_parent_map(keys))
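
The lookup contract exercised here, as a runnable stand-in with plain dicts (hypothetical data, not this file's fixtures): absent keys are silently omitted from the result, never raised on.

    stored = {('r0',): (), ('r1',): (('r0',),)}

    def get_parent_map(keys):
        # only keys actually present appear in the result
        return {k: stored[k] for k in keys if k in stored}

    assert get_parent_map([('r1',), ('missing',)]) == {('r1',): (('r0',),)}
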
2321
2267
 
2323
2269
        files = self.get_versionedfiles()
2324
2270
        self.get_diamond_files(files)
2325
2271
        if self.key_length == 1:
2326
 
            keys = [(b'base',), (b'origin',), (b'left',),
2327
 
                    (b'merged',), (b'right',)]
 
2272
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
2328
2273
        else:
2329
2274
            # ask for shas from different prefixes.
2330
2275
            keys = [
2331
 
                (b'FileA', b'base'), (b'FileB', b'origin'), (b'FileA', b'left'),
2332
 
                (b'FileA', b'merged'), (b'FileB', b'right'),
 
2276
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
 
2277
                ('FileA', 'merged'), ('FileB', 'right'),
2333
2278
                ]
2334
2279
        self.assertEqual({
2335
 
            keys[0]: b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
2336
 
            keys[1]: b'00e364d235126be43292ab09cb4686cf703ddc17',
2337
 
            keys[2]: b'a8478686da38e370e32e42e8a0c220e33ee9132f',
2338
 
            keys[3]: b'ed8bce375198ea62444dc71952b22cfc2b09226d',
2339
 
            keys[4]: b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
 
2280
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
 
2281
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
 
2282
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
 
2283
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
 
2284
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
2340
2285
            },
2341
2286
            files.get_sha1s(keys))
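
These digests follow from the diamond fixture's fulltexts, on the assumption that get_sha1s() hashes the reconstructed full text (as osutils.sha_strings does over the stored lines); for example 'base', whose fulltext is b'base\n':

    import hashlib

    assert (hashlib.sha1(b'base\n').hexdigest()
            == '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44')
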
2342
2287
 
2350
2295
        self.assertEqual(set(actual.keys()), set(expected.keys()))
2351
2296
        actual_parents = actual.get_parent_map(actual.keys())
2352
2297
        if self.graph:
2353
 
            self.assertEqual(
2354
 
                actual_parents, expected.get_parent_map(expected.keys()))
 
2298
            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
2355
2299
        else:
2356
2300
            for key, parents in actual_parents.items():
2357
2301
                self.assertEqual(None, parents)
2358
2302
        for key in actual.keys():
2359
 
            actual_text = next(actual.get_record_stream(
2360
 
                [key], 'unordered', True)).get_bytes_as('fulltext')
2361
 
            expected_text = next(expected.get_record_stream(
2362
 
                [key], 'unordered', True)).get_bytes_as('fulltext')
 
2303
            actual_text = actual.get_record_stream(
 
2304
                [key], 'unordered', True).next().get_bytes_as('fulltext')
 
2305
            expected_text = expected.get_record_stream(
 
2306
                [key], 'unordered', True).next().get_bytes_as('fulltext')
2363
2307
            self.assertEqual(actual_text, expected_text)
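
The two spellings shown side by side here are equivalent for a single iteration step: next(it) is the builtin required on Python 3, while it.next() exists only on Python 2. A trivial, self-contained check:

    it = iter([b'fulltext'])
    assert next(it) == b'fulltext'  # it.next() gives the same on Python 2
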
2364
2308
 
2365
2309
    def test_insert_record_stream_fulltexts(self):
2373
2317
            source_transport)
2374
2318
        self.get_diamond_files(source, trailing_eol=False)
2375
2319
        stream = source.get_record_stream(source.keys(), 'topological',
2376
 
                                          False)
 
2320
            False)
2377
2321
        files.insert_record_stream(stream)
2378
2322
        self.assertIdenticalVersionedFile(source, files)
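
The round-trip pattern shared by this and the following tests, as a dict-based stand-in (hypothetical data): draining one store's records into another must leave the two identical, which is the core of assertIdenticalVersionedFile.

    source = {('base',): b'base\n', ('left',): b'base\nleft\n'}
    target = {}
    for key, text in source.items():  # stands in for the record stream
        target[key] = text            # and for insert_record_stream
    assert target == source
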
2379
2323
 
2388
2332
            source_transport)
2389
2333
        self.get_diamond_files(source, trailing_eol=False)
2390
2334
        stream = source.get_record_stream(source.keys(), 'topological',
2391
 
                                          False)
 
2335
            False)
2392
2336
        files.insert_record_stream(stream)
2393
2337
        self.assertIdenticalVersionedFile(source, files)
2394
2338
 
2401
2345
        source = make_file_factory(True, mapper)(source_transport)
2402
2346
        self.get_diamond_files(source)
2403
2347
        stream = source.get_record_stream(source.keys(), 'topological',
2404
 
                                          False)
 
2348
            False)
2405
2349
        files.insert_record_stream(stream)
2406
2350
        self.assertIdenticalVersionedFile(source, files)
2407
2351
 
2414
2358
        source = make_file_factory(True, mapper)(source_transport)
2415
2359
        self.get_diamond_files(source, trailing_eol=False)
2416
2360
        stream = source.get_record_stream(source.keys(), 'topological',
2417
 
                                          False)
 
2361
            False)
2418
2362
        files.insert_record_stream(stream)
2419
2363
        self.assertIdenticalVersionedFile(source, files)
2420
2364
 
2427
2371
        source = make_file_factory(False, mapper)(source_transport)
2428
2372
        self.get_diamond_files(source)
2429
2373
        stream = source.get_record_stream(source.keys(), 'topological',
2430
 
                                          False)
 
2374
            False)
2431
2375
        files.insert_record_stream(stream)
2432
2376
        self.assertIdenticalVersionedFile(source, files)
2433
2377
 
2440
2384
        source = make_file_factory(False, mapper)(source_transport)
2441
2385
        self.get_diamond_files(source, trailing_eol=False)
2442
2386
        stream = source.get_record_stream(source.keys(), 'topological',
2443
 
                                          False)
 
2387
            False)
2444
2388
        files.insert_record_stream(stream)
2445
2389
        self.assertIdenticalVersionedFile(source, files)
2446
2390
 
2452
2396
        # insert some keys into files.
2453
2397
        self.get_diamond_files(files, left_only=True)
2454
2398
        stream = source.get_record_stream(source.keys(), 'topological',
2455
 
                                          False)
 
2399
            False)
2456
2400
        files.insert_record_stream(stream)
2457
2401
        self.assertIdenticalVersionedFile(source, files)
2458
2402
 
2460
2404
        """Inserting a stream with absent keys should raise an error."""
2461
2405
        files = self.get_versionedfiles()
2462
2406
        source = self.get_versionedfiles('source')
2463
 
        stream = source.get_record_stream([(b'missing',) * self.key_length],
2464
 
                                          'topological', False)
 
2407
        stream = source.get_record_stream([('missing',) * self.key_length],
 
2408
            'topological', False)
2465
2409
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
2466
 
                          stream)
 
2410
            stream)
2467
2411
 
2468
2412
    def test_insert_record_stream_out_of_order(self):
2469
2413
        """An out of order stream can either error or work."""
2471
2415
        source = self.get_versionedfiles('source')
2472
2416
        self.get_diamond_files(source)
2473
2417
        if self.key_length == 1:
2474
 
            origin_keys = [(b'origin',)]
2475
 
            end_keys = [(b'merged',), (b'left',)]
2476
 
            start_keys = [(b'right',), (b'base',)]
 
2418
            origin_keys = [('origin',)]
 
2419
            end_keys = [('merged',), ('left',)]
 
2420
            start_keys = [('right',), ('base',)]
2477
2421
        else:
2478
 
            origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
2479
 
            end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
2480
 
                        (b'FileB', b'merged',), (b'FileB', b'left',)]
2481
 
            start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
2482
 
                          (b'FileB', b'right',), (b'FileB', b'base',)]
2483
 
        origin_entries = source.get_record_stream(
2484
 
            origin_keys, 'unordered', False)
 
2422
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
 
2423
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
 
2424
                ('FileB', 'merged',), ('FileB', 'left',)]
 
2425
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
 
2426
                ('FileB', 'right',), ('FileB', 'base',)]
 
2427
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
2485
2428
        end_entries = source.get_record_stream(end_keys, 'topological', False)
2486
 
        start_entries = source.get_record_stream(
2487
 
            start_keys, 'topological', False)
 
2429
        start_entries = source.get_record_stream(start_keys, 'topological', False)
2488
2430
        entries = itertools.chain(origin_entries, end_entries, start_entries)
2489
2431
        try:
2490
2432
            files.insert_record_stream(entries)
2503
2445
        source = self.get_versionedfiles('source')
2504
2446
        parents = ()
2505
2447
        keys = []
2506
 
        content = [(b'same same %d\n' % n) for n in range(500)]
2507
 
        letters = b'abcdefghijklmnopqrstuvwxyz'
2508
 
        for i in range(len(letters)):
2509
 
            letter = letters[i:i + 1]
2510
 
            key = (b'key-' + letter,)
 
2448
        content = [('same same %d\n' % n) for n in range(500)]
 
2449
        for letter in 'abcdefghijklmnopqrstuvwxyz':
 
2450
            key = ('key-' + letter,)
2511
2451
            if self.key_length == 2:
2512
 
                key = (b'prefix',) + key
2513
 
            content.append(b'content for ' + letter + b'\n')
 
2452
                key = ('prefix',) + key
 
2453
            content.append('content for ' + letter + '\n')
2514
2454
            source.add_lines(key, parents, content)
2515
2455
            keys.append(key)
2516
2456
            parents = (key,)
2542
2482
        source_transport.mkdir('.')
2543
2483
        source = make_file_factory(False, mapper)(source_transport)
2544
2484
        get_diamond_files(source, self.key_length, trailing_eol=True,
2545
 
                          nograph=False, left_only=False)
 
2485
            nograph=False, left_only=False)
2546
2486
        return source
2547
2487
 
2548
2488
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
2551
2491
        not added.
2552
2492
        """
2553
2493
        source = self.get_knit_delta_source()
2554
 
        keys = [self.get_simple_key(b'origin'), self.get_simple_key(b'merged')]
 
2494
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
2555
2495
        entries = source.get_record_stream(keys, 'unordered', False)
2556
2496
        files = self.get_versionedfiles()
2557
2497
        if self.support_partial_insertion:
2558
2498
            self.assertEqual([],
2559
 
                             list(files.get_missing_compression_parent_keys()))
 
2499
                list(files.get_missing_compression_parent_keys()))
2560
2500
            files.insert_record_stream(entries)
2561
2501
            missing_bases = files.get_missing_compression_parent_keys()
2562
 
            self.assertEqual({self.get_simple_key(b'left')},
2563
 
                             set(missing_bases))
 
2502
            self.assertEqual({self.get_simple_key('left')},
 
2503
                set(missing_bases))
2564
2504
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
2565
2505
        else:
2566
2506
            self.assertRaises(
2578
2518
            raise TestNotApplicable(
2579
2519
                'versioned file scenario does not support partial insertion')
2580
2520
        source = self.get_knit_delta_source()
2581
 
        entries = source.get_record_stream([self.get_simple_key(b'origin'),
2582
 
                                            self.get_simple_key(b'merged')], 'unordered', False)
 
2521
        entries = source.get_record_stream([self.get_simple_key('origin'),
 
2522
            self.get_simple_key('merged')], 'unordered', False)
2583
2523
        files = self.get_versionedfiles()
2584
2524
        files.insert_record_stream(entries)
2585
2525
        missing_bases = files.get_missing_compression_parent_keys()
2586
 
        self.assertEqual({self.get_simple_key(b'left')},
2587
 
                         set(missing_bases))
 
2526
        self.assertEqual({self.get_simple_key('left')},
 
2527
            set(missing_bases))
2588
2528
        # 'merged' is inserted (although a commit of a write group involving
2589
2529
        # this versionedfiles would fail).
2590
 
        merged_key = self.get_simple_key(b'merged')
 
2530
        merged_key = self.get_simple_key('merged')
2591
2531
        self.assertEqual(
2592
 
            [merged_key], list(files.get_parent_map([merged_key]).keys()))
 
2532
            [merged_key], files.get_parent_map([merged_key]).keys())
2593
2533
        # Add the full delta closure of the missing records
2594
2534
        missing_entries = source.get_record_stream(
2595
2535
            missing_bases, 'unordered', True)
2597
2537
        # Now 'merged' is fully inserted (and a commit would succeed).
2598
2538
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
2599
2539
        self.assertEqual(
2600
 
            [merged_key], list(files.get_parent_map([merged_key]).keys()))
 
2540
            [merged_key], files.get_parent_map([merged_key]).keys())
2601
2541
        files.check()
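
A compact sketch of the partial-insertion lifecycle these assertions walk through (the DeltaStore class is hypothetical): a delta whose compression parent is absent is buffered and reported, then cleared once the basis arrives, at which point check() can pass.

    class DeltaStore(object):
        def __init__(self):
            self._missing = set()

        def insert_delta(self, key, compression_parent, basis_present):
            if not basis_present:
                self._missing.add(compression_parent)  # buffered, not lost

        def add_basis(self, key):
            self._missing.discard(key)

        def get_missing_compression_parent_keys(self):
            return set(self._missing)

    store = DeltaStore()
    store.insert_delta(('merged',), ('left',), basis_present=False)
    assert store.get_missing_compression_parent_keys() == {('left',)}
    store.add_basis(('left',))
    assert store.get_missing_compression_parent_keys() == set()
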
2602
2542
 
2603
2543
    def test_iter_lines_added_or_present_in_keys(self):
2617
2557
 
2618
2558
        files = self.get_versionedfiles()
2619
2559
        # add a base to get included
2620
 
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
 
2560
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
2621
2561
        # add an ancestor to be included on one side
2622
 
        files.add_lines(self.get_simple_key(
2623
 
            b'lancestor'), (), [b'lancestor\n'])
 
2562
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
2624
2563
        # add an ancestor to be included on the other side
2625
 
        files.add_lines(self.get_simple_key(b'rancestor'),
2626
 
                        self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
 
2564
        files.add_lines(self.get_simple_key('rancestor'),
 
2565
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
2627
2566
        # add a child of rancestor with no eofile-nl
2628
 
        files.add_lines(self.get_simple_key(b'child'),
2629
 
                        self.get_parents([self.get_simple_key(b'rancestor')]),
2630
 
                        [b'base\n', b'child\n'])
 
2567
        files.add_lines(self.get_simple_key('child'),
 
2568
            self.get_parents([self.get_simple_key('rancestor')]),
 
2569
            ['base\n', 'child\n'])
2631
2570
        # add a child of lancestor and base to join the two roots
2632
 
        files.add_lines(self.get_simple_key(b'otherchild'),
2633
 
                        self.get_parents([self.get_simple_key(b'lancestor'),
2634
 
                                          self.get_simple_key(b'base')]),
2635
 
                        [b'base\n', b'lancestor\n', b'otherchild\n'])
2636
 
 
 
2571
        files.add_lines(self.get_simple_key('otherchild'),
 
2572
            self.get_parents([self.get_simple_key('lancestor'),
 
2573
                self.get_simple_key('base')]),
 
2574
            ['base\n', 'lancestor\n', 'otherchild\n'])
2637
2575
        def iter_with_keys(keys, expected):
2638
2576
            # now we need to see what lines are returned, and how often.
2639
2577
            lines = {}
2640
2578
            progress = InstrumentedProgress()
2641
2579
            # iterate over the lines
2642
2580
            for line in files.iter_lines_added_or_present_in_keys(keys,
2643
 
                                                                  pb=progress):
 
2581
                pb=progress):
2644
2582
                lines.setdefault(line, 0)
2645
2583
                lines[line] += 1
2646
 
            if [] != progress.updates:
 
2584
            if []!= progress.updates:
2647
2585
                self.assertEqual(expected, progress.updates)
2648
2586
            return lines
2649
2587
        lines = iter_with_keys(
2650
 
            [self.get_simple_key(b'child'),
2651
 
             self.get_simple_key(b'otherchild')],
 
2588
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
2652
2589
            [('Walking content', 0, 2),
2653
2590
             ('Walking content', 1, 2),
2654
2591
             ('Walking content', 2, 2)])
2655
2592
        # we must see child and otherchild
2656
 
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
 
2593
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
2657
2594
        self.assertTrue(
2658
 
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
 
2595
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
2659
2596
        # we don't care if we got more than that.
2660
2597
 
2661
2598
        # test all lines
2662
2599
        lines = iter_with_keys(files.keys(),
2663
 
                               [('Walking content', 0, 5),
2664
 
                                ('Walking content', 1, 5),
2665
 
                                ('Walking content', 2, 5),
2666
 
                                ('Walking content', 3, 5),
2667
 
                                ('Walking content', 4, 5),
2668
 
                                ('Walking content', 5, 5)])
 
2600
            [('Walking content', 0, 5),
 
2601
             ('Walking content', 1, 5),
 
2602
             ('Walking content', 2, 5),
 
2603
             ('Walking content', 3, 5),
 
2604
             ('Walking content', 4, 5),
 
2605
             ('Walking content', 5, 5)])
2669
2606
        # all lines must be seen at least once
2670
 
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
2671
 
        self.assertTrue(
2672
 
            lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
2673
 
        self.assertTrue(
2674
 
            lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
2675
 
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
2676
 
        self.assertTrue(
2677
 
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
 
2607
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
 
2608
        self.assertTrue(
 
2609
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
 
2610
        self.assertTrue(
 
2611
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
 
2612
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
 
2613
        self.assertTrue(
 
2614
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
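
The tallying idiom used above, in isolation: the iterator yields (line, key) pairs at least once per key that introduced the line, so the test counts occurrences in a dict and only asserts that each expected pair shows up (the sample pairs are hypothetical):

    lines = {}
    for pair in [('child\n', ('child',)), ('child\n', ('child',))]:
        lines.setdefault(pair, 0)
        lines[pair] += 1
    assert lines[('child\n', ('child',))] > 0  # duplicates are acceptable
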
2678
2615
 
2679
2616
    def test_make_mpdiffs(self):
2680
2617
        from breezy import multiparent
2682
2619
        # add texts that should trip the knit maximum delta chain threshold
2683
2620
        # as well as doing parallel chains of data in knits.
2684
2621
        # this is done by two chains of 26 insertions
2685
 
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
2686
 
        files.add_lines(self.get_simple_key(b'noeol'),
2687
 
                        self.get_parents([self.get_simple_key(b'base')]), [b'line'])
 
2622
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
 
2623
        files.add_lines(self.get_simple_key('noeol'),
 
2624
            self.get_parents([self.get_simple_key('base')]), ['line'])
2688
2625
        # detailed eol tests:
2689
2626
        # shared last line with parent no-eol
2690
 
        files.add_lines(self.get_simple_key(b'noeolsecond'),
2691
 
                        self.get_parents([self.get_simple_key(b'noeol')]),
2692
 
                        [b'line\n', b'line'])
 
2627
        files.add_lines(self.get_simple_key('noeolsecond'),
 
2628
            self.get_parents([self.get_simple_key('noeol')]),
 
2629
                ['line\n', 'line'])
2693
2630
        # differing last line with parent, both no-eol
2694
 
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
2695
 
                        self.get_parents(
2696
 
                            [self.get_simple_key(b'noeolsecond')]),
2697
 
                        [b'line\n', b'phone'])
 
2631
        files.add_lines(self.get_simple_key('noeolnotshared'),
 
2632
            self.get_parents([self.get_simple_key('noeolsecond')]),
 
2633
                ['line\n', 'phone'])
2698
2634
        # add eol following a noneol parent, change content
2699
 
        files.add_lines(self.get_simple_key(b'eol'),
2700
 
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
 
2635
        files.add_lines(self.get_simple_key('eol'),
 
2636
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
2701
2637
        # add eol following a noneol parent, no change content
2702
 
        files.add_lines(self.get_simple_key(b'eolline'),
2703
 
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
 
2638
        files.add_lines(self.get_simple_key('eolline'),
 
2639
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
2704
2640
        # noeol with no parents:
2705
 
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
 
2641
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
2706
2642
        # noeol preceding its leftmost parent in the output:
2707
2643
        # this is done by making it a merge of two parents with no common
2708
2644
        # ancestry: noeolbase and noeol with the
2709
2645
        # later-inserted parent the leftmost.
2710
 
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
2711
 
                        self.get_parents([self.get_simple_key(b'noeolbase'),
2712
 
                                          self.get_simple_key(b'noeol')]),
2713
 
                        [b'line'])
 
2646
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
 
2647
            self.get_parents([self.get_simple_key('noeolbase'),
 
2648
                self.get_simple_key('noeol')]),
 
2649
            ['line'])
2714
2650
        # two identical no-eol texts
2715
 
        files.add_lines(self.get_simple_key(b'noeoldup'),
2716
 
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
2717
 
        next_parent = self.get_simple_key(b'base')
2718
 
        text_name = b'chain1-'
2719
 
        text = [b'line\n']
2720
 
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
2721
 
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
2722
 
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
2723
 
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
2724
 
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
2725
 
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
2726
 
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
2727
 
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
2728
 
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
2729
 
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
2730
 
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
2731
 
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
2732
 
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
2733
 
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
2734
 
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
2735
 
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
2736
 
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
2737
 
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
2738
 
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
2739
 
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
2740
 
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
2741
 
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
2742
 
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
2743
 
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
2744
 
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
2745
 
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
 
2651
        files.add_lines(self.get_simple_key('noeoldup'),
 
2652
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
 
2653
        next_parent = self.get_simple_key('base')
 
2654
        text_name = 'chain1-'
 
2655
        text = ['line\n']
 
2656
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
 
2657
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
 
2658
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
 
2659
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
 
2660
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
 
2661
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
 
2662
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
 
2663
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
 
2664
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
 
2665
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
 
2666
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
 
2667
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
 
2668
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
 
2669
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
 
2670
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
 
2671
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
 
2672
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
 
2673
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
 
2674
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
 
2675
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
 
2676
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
 
2677
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
 
2678
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
 
2679
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
 
2680
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
 
2681
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
2746
2682
                 }
2747
2683
        for depth in range(26):
2748
 
            new_version = self.get_simple_key(text_name + b'%d' % depth)
2749
 
            text = text + [b'line\n']
 
2684
            new_version = self.get_simple_key(text_name + '%s' % depth)
 
2685
            text = text + ['line\n']
2750
2686
            files.add_lines(new_version, self.get_parents([next_parent]), text)
2751
2687
            next_parent = new_version
2752
 
        next_parent = self.get_simple_key(b'base')
2753
 
        text_name = b'chain2-'
2754
 
        text = [b'line\n']
 
2688
        next_parent = self.get_simple_key('base')
 
2689
        text_name = 'chain2-'
 
2690
        text = ['line\n']
2755
2691
        for depth in range(26):
2756
 
            new_version = self.get_simple_key(text_name + b'%d' % depth)
2757
 
            text = text + [b'line\n']
 
2692
            new_version = self.get_simple_key(text_name + '%s' % depth)
 
2693
            text = text + ['line\n']
2758
2694
            files.add_lines(new_version, self.get_parents([next_parent]), text)
2759
2695
            next_parent = new_version
2760
2696
        target = self.get_versionedfiles('target')
2764
2700
            target.add_mpdiffs(
2765
2701
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
2766
2702
            self.assertEqualDiff(
2767
 
                next(files.get_record_stream([key], 'unordered',
2768
 
                                             True)).get_bytes_as('fulltext'),
2769
 
                next(target.get_record_stream([key], 'unordered',
2770
 
                                              True)).get_bytes_as('fulltext')
 
2703
                files.get_record_stream([key], 'unordered',
 
2704
                    True).next().get_bytes_as('fulltext'),
 
2705
                target.get_record_stream([key], 'unordered',
 
2706
                    True).next().get_bytes_as('fulltext')
2771
2707
                )
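
How the chain fixtures above grow: text starts as one 'line\n' and gains one copy per depth before add_lines is called, so the version at depth d holds d + 2 lines. On the assumption that the sha1s table maps depth to the digest of that fulltext, entry 0 is the digest of two such lines:

    import hashlib

    print(hashlib.sha1(b'line\n' * 2).hexdigest())
    # per the table above, this should print
    # da6d3141cb4a5e6f464bf6e0518042ddc7bfd079
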
2772
2708
 
2773
2709
    def test_keys(self):
2776
2712
        files = self.get_versionedfiles()
2777
2713
        self.assertEqual(set(), set(files.keys()))
2778
2714
        if self.key_length == 1:
2779
 
            key = (b'foo',)
 
2715
            key = ('foo',)
2780
2716
        else:
2781
 
            key = (b'foo', b'bar',)
 
2717
            key = ('foo', 'bar',)
2782
2718
        files.add_lines(key, (), [])
2783
2719
        self.assertEqual({key}, set(files.keys()))
2784
2720
 
2802
2738
 
2803
2739
    def test_add_lines(self):
2804
2740
        self.assertRaises(NotImplementedError,
2805
 
                          self.texts.add_lines, b"foo", [], [])
 
2741
                self.texts.add_lines, "foo", [], [])
2806
2742
 
2807
2743
    def test_add_mpdiffs(self):
2808
2744
        self.assertRaises(NotImplementedError,
2809
 
                          self.texts.add_mpdiffs, [])
 
2745
                self.texts.add_mpdiffs, [])
2810
2746
 
2811
2747
    def test_check_noerrors(self):
2812
2748
        self.texts.check()
2816
2752
                          [])
2817
2753
 
2818
2754
    def test_get_sha1s_nonexistent(self):
2819
 
        self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))
 
2755
        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))
2820
2756
 
2821
2757
    def test_get_sha1s(self):
2822
 
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
2823
 
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
2824
 
                         self.texts.get_sha1s([(b"key",)]))
 
2758
        self._lines["key"] = ["dataline1", "dataline2"]
 
2759
        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
 
2760
                           self.texts.get_sha1s([("key",)]))
2825
2761
 
2826
2762
    def test_get_parent_map(self):
2827
 
        self._parent_map = {b"G": (b"A", b"B")}
2828
 
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
2829
 
                         self.texts.get_parent_map([(b"G",), (b"L",)]))
 
2763
        self._parent_map = {"G": ("A", "B")}
 
2764
        self.assertEqual({("G",): (("A",), ("B",))},
 
2765
                          self.texts.get_parent_map([("G",), ("L",)]))
2830
2766
 
2831
2767
    def test_get_record_stream(self):
2832
 
        self._lines[b"A"] = [b"FOO", b"BAR"]
2833
 
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
 
2768
        self._lines["A"] = ["FOO", "BAR"]
 
2769
        it = self.texts.get_record_stream([("A",)], "unordered", True)
2834
2770
        record = next(it)
2835
2771
        self.assertEqual("chunked", record.storage_kind)
2836
 
        self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
2837
 
        self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))
 
2772
        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
 
2773
        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
2838
2774
 
2839
2775
    def test_get_record_stream_absent(self):
2840
 
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
 
2776
        it = self.texts.get_record_stream([("A",)], "unordered", True)
2841
2777
        record = next(it)
2842
2778
        self.assertEqual("absent", record.storage_kind)
2843
2779
 
2844
2780
    def test_iter_lines_added_or_present_in_keys(self):
2845
 
        self._lines[b"A"] = [b"FOO", b"BAR"]
2846
 
        self._lines[b"B"] = [b"HEY"]
2847
 
        self._lines[b"C"] = [b"Alberta"]
2848
 
        it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
2849
 
        self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
2850
 
                         sorted(list(it)))
 
2781
        self._lines["A"] = ["FOO", "BAR"]
 
2782
        self._lines["B"] = ["HEY"]
 
2783
        self._lines["C"] = ["Alberta"]
 
2784
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
 
2785
        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
 
2786
            sorted(list(it)))
2851
2787
 
2852
2788
 
2853
2789
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):
2856
2792
        builder = self.make_branch_builder('test')
2857
2793
        builder.start_series()
2858
2794
        builder.build_snapshot(None, [
2859
 
            ('add', ('', b'TREE_ROOT', 'directory', None))],
2860
 
            revision_id=b'A')
2861
 
        builder.build_snapshot([b'A'], [], revision_id=b'B')
2862
 
        builder.build_snapshot([b'B'], [], revision_id=b'C')
2863
 
        builder.build_snapshot([b'C'], [], revision_id=b'D')
 
2795
            ('add', ('', 'TREE_ROOT', 'directory', None))],
 
2796
            revision_id='A')
 
2797
        builder.build_snapshot(['A'], [], revision_id='B')
 
2798
        builder.build_snapshot(['B'], [], revision_id='C')
 
2799
        builder.build_snapshot(['C'], [], revision_id='D')
2864
2800
        builder.finish_series()
2865
2801
        b = builder.get_branch()
2866
2802
        b.lock_read()
2873
2809
        self.assertEqual([], vf.calls)
2874
2810
 
2875
2811
    def test_get_record_stream_topological(self):
2876
 
        vf = self.get_ordering_vf(
2877
 
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
2878
 
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
 
2812
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
 
2813
        request_keys = [('B',), ('C',), ('D',), ('A',)]
2879
2814
        keys = [r.key for r in vf.get_record_stream(request_keys,
2880
 
                                                    'topological', False)]
 
2815
                                    'topological', False)]
2881
2816
        # We should have gotten the keys in topological order
2882
 
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
 
2817
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
2883
2818
        # And recorded that the request was made
2884
2819
        self.assertEqual([('get_record_stream', request_keys, 'topological',
2885
2820
                           False)], vf.calls)
2886
2821
 
2887
2822
    def test_get_record_stream_ordered(self):
2888
 
        vf = self.get_ordering_vf(
2889
 
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
2890
 
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
 
2823
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
 
2824
        request_keys = [('B',), ('C',), ('D',), ('A',)]
2891
2825
        keys = [r.key for r in vf.get_record_stream(request_keys,
2892
 
                                                    'unordered', False)]
 
2826
                                   'unordered', False)]
2893
2827
        # They should be returned based on their priority
2894
 
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
 
2828
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
2895
2829
        # And the request recorded
2896
2830
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
2897
2831
                           False)], vf.calls)
2898
2832
 
2899
2833
    def test_get_record_stream_implicit_order(self):
2900
 
        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
2901
 
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
 
2834
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
 
2835
        request_keys = [('B',), ('C',), ('D',), ('A',)]
2902
2836
        keys = [r.key for r in vf.get_record_stream(request_keys,
2903
 
                                                    'unordered', False)]
 
2837
                                   'unordered', False)]
2904
2838
        # A and C are not in the map, so they get sorted to the front. A comes
2905
2839
        # before C alphabetically, so it comes back first
2906
 
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
 
2840
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
2907
2841
        # And the request recorded
2908
2842
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
2909
2843
                           False)], vf.calls)
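
Taken together, the two 'unordered' tests pin down the decorator's ordering: keys missing from the priority map sort to the front alphabetically, and the rest follow in ascending priority. A runnable restatement (expected_order is a hypothetical helper, not decorator API):

    def expected_order(request_keys, prio):
        unknown = sorted(k for k in request_keys if k not in prio)
        known = sorted((k for k in request_keys if k in prio), key=prio.get)
        return unknown + known

    assert expected_order([('B',), ('C',), ('D',), ('A',)],
                          {('B',): 2, ('D',): 1}) == \
        [('A',), ('C',), ('D',), ('B',)]
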