82
80
:param trailing_eol: If True end the last line with \n.
86
'base': (('origin',),),
88
'right': (('base',),),
89
'merged': (('left',), ('right',)),
84
b'base': ((b'origin',),),
85
b'left': ((b'base',),),
86
b'right': ((b'base',),),
87
b'merged': ((b'left',), (b'right',)),
91
89
# insert a diamond graph to exercise deltas and merges.
96
f.add_lines('origin', [], ['origin' + last_char])
97
f.add_lines('base', ['origin'], ['base' + last_char])
98
f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
94
f.add_lines(b'origin', [], [b'origin' + last_char])
95
f.add_lines(b'base', [b'origin'], [b'base' + last_char])
96
f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
100
f.add_lines('right', ['base'],
101
['base\n', 'right' + last_char])
102
f.add_lines('merged', ['left', 'right'],
103
['base\n', 'left\n', 'right\n', 'merged' + last_char])
98
f.add_lines(b'right', [b'base'],
99
[b'base\n', b'right' + last_char])
100
f.add_lines(b'merged', [b'left', b'right'],
101
[b'base\n', b'left\n', b'right\n', b'merged' + last_char])
104
102
return f, parents
107
105
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
108
nograph=False, nokeys=False):
106
nograph=False, nokeys=False):
109
107
"""Get a diamond graph to exercise deltas and merges.
111
109
This creates a 5-node graph in files. If files supports 2-length keys two
148
148
# we loop over each key because that spreads the inserts across prefixes,
149
149
# which is how commit operates.
150
150
for prefix in prefixes:
151
result.append(files.add_lines(prefix + get_key('origin'), (),
152
['origin' + last_char]))
153
for prefix in prefixes:
154
result.append(files.add_lines(prefix + get_key('base'),
155
get_parents([('origin',)]), ['base' + last_char]))
156
for prefix in prefixes:
157
result.append(files.add_lines(prefix + get_key('left'),
158
get_parents([('base',)]),
159
['base\n', 'left' + last_char]))
151
result.append(files.add_lines(prefix + get_key(b'origin'), (),
152
[b'origin' + last_char]))
153
for prefix in prefixes:
154
result.append(files.add_lines(prefix + get_key(b'base'),
155
get_parents([(b'origin',)]), [b'base' + last_char]))
156
for prefix in prefixes:
157
result.append(files.add_lines(prefix + get_key(b'left'),
158
get_parents([(b'base',)]),
159
[b'base\n', b'left' + last_char]))
160
160
if not left_only:
161
161
for prefix in prefixes:
162
result.append(files.add_lines(prefix + get_key('right'),
163
get_parents([('base',)]),
164
['base\n', 'right' + last_char]))
162
result.append(files.add_lines(prefix + get_key(b'right'),
163
get_parents([(b'base',)]),
164
[b'base\n', b'right' + last_char]))
165
165
for prefix in prefixes:
166
result.append(files.add_lines(prefix + get_key('merged'),
167
get_parents([('left',), ('right',)]),
168
['base\n', 'left\n', 'right\n', 'merged' + last_char]))
166
result.append(files.add_lines(prefix + get_key(b'merged'),
168
[(b'left',), (b'right',)]),
169
[b'base\n', b'left\n', b'right\n', b'merged' + last_char]))
185
186
def test_add(self):
186
187
f = self.get_file()
187
f.add_lines('r0', [], ['a\n', 'b\n'])
188
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
188
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
189
f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
189
191
def verify_file(f):
190
192
versions = f.versions()
191
self.assertTrue('r0' in versions)
192
self.assertTrue('r1' in versions)
193
self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
194
self.assertEqual(f.get_text('r0'), 'a\nb\n')
195
self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
193
self.assertTrue(b'r0' in versions)
194
self.assertTrue(b'r1' in versions)
195
self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
196
self.assertEqual(f.get_text(b'r0'), b'a\nb\n')
197
self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
196
198
self.assertEqual(2, len(f))
197
199
self.assertEqual(2, f.num_versions())
199
201
self.assertRaises(RevisionNotPresent,
200
f.add_lines, 'r2', ['foo'], [])
202
f.add_lines, b'r2', [b'foo'], [])
201
203
self.assertRaises(RevisionAlreadyPresent,
202
f.add_lines, 'r1', [], [])
204
f.add_lines, b'r1', [], [])
204
206
# this checks that reopen with create=True does not break anything.
205
207
f = self.reopen_file(create=True)
208
210
def test_adds_with_parent_texts(self):
209
211
f = self.get_file()
210
212
parent_texts = {}
211
_, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
213
_, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
213
_, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
214
['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
215
_, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
216
[b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
215
217
except NotImplementedError:
216
218
# if the format doesn't support ghosts, just add normally.
217
_, _, parent_texts['r1'] = f.add_lines('r1',
218
['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
219
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
220
self.assertNotEqual(None, parent_texts['r0'])
221
self.assertNotEqual(None, parent_texts['r1'])
219
_, _, parent_texts[b'r1'] = f.add_lines(b'r1',
220
[b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
221
f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
222
parent_texts=parent_texts)
223
self.assertNotEqual(None, parent_texts[b'r0'])
224
self.assertNotEqual(None, parent_texts[b'r1'])
222
226
def verify_file(f):
223
227
versions = f.versions()
224
self.assertTrue('r0' in versions)
225
self.assertTrue('r1' in versions)
226
self.assertTrue('r2' in versions)
227
self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
228
self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
229
self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
228
self.assertTrue(b'r0' in versions)
229
self.assertTrue(b'r1' in versions)
230
self.assertTrue(b'r2' in versions)
231
self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
232
self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
233
self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
230
234
self.assertEqual(3, f.num_versions())
231
origins = f.annotate('r1')
232
self.assertEqual(origins[0][0], 'r0')
233
self.assertEqual(origins[1][0], 'r1')
234
origins = f.annotate('r2')
235
self.assertEqual(origins[0][0], 'r1')
236
self.assertEqual(origins[1][0], 'r2')
235
origins = f.annotate(b'r1')
236
self.assertEqual(origins[0][0], b'r0')
237
self.assertEqual(origins[1][0], b'r1')
238
origins = f.annotate(b'r2')
239
self.assertEqual(origins[0][0], b'r1')
240
self.assertEqual(origins[1][0], b'r2')
239
243
f = self.reopen_file()
258
262
vf = self.get_file()
259
263
if isinstance(vf, WeaveFile):
260
264
raise TestSkipped("WeaveFile ignores left_matching_blocks")
261
vf.add_lines('1', [], ['a\n'])
262
vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
265
vf.add_lines(b'1', [], [b'a\n'])
266
vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
263
267
left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
264
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
265
vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
268
self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
269
vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
266
270
left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
267
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
271
self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))
269
273
def test_inline_newline_throws(self):
270
274
# \r characters are not permitted in lines being added
271
275
vf = self.get_file()
272
276
self.assertRaises(errors.BzrBadParameterContainsNewline,
273
vf.add_lines, 'a', [], ['a\n\n'])
277
vf.add_lines, b'a', [], [b'a\n\n'])
274
278
self.assertRaises(
275
279
(errors.BzrBadParameterContainsNewline, NotImplementedError),
276
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
280
vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
277
281
# but inline CR's are allowed
278
vf.add_lines('a', [], ['a\r\n'])
282
vf.add_lines(b'a', [], [b'a\r\n'])
280
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
284
vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
281
285
except NotImplementedError:
284
288
def test_add_reserved(self):
285
289
vf = self.get_file()
286
290
self.assertRaises(errors.ReservedId,
287
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
291
vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])
289
293
def test_add_lines_nostoresha(self):
290
294
"""When nostore_sha is supplied using old content raises."""
291
295
vf = self.get_file()
292
empty_text = ('a', [])
293
sample_text_nl = ('b', ["foo\n", "bar\n"])
294
sample_text_no_nl = ('c', ["foo\n", "bar"])
296
empty_text = (b'a', [])
297
sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
298
sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
296
300
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
297
301
sha, _, _ = vf.add_lines(version, [], lines)
299
303
# we now have a copy of all the lines in the vf.
300
304
for sha, (version, lines) in zip(
301
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
305
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
302
306
self.assertRaises(errors.ExistingContent,
303
vf.add_lines, version + "2", [], lines,
307
vf.add_lines, version + b"2", [], lines,
305
309
# and no new version should have been added.
306
310
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
309
313
def test_add_lines_with_ghosts_nostoresha(self):
310
314
"""When nostore_sha is supplied using old content raises."""
311
315
vf = self.get_file()
312
empty_text = ('a', [])
313
sample_text_nl = ('b', ["foo\n", "bar\n"])
314
sample_text_no_nl = ('c', ["foo\n", "bar"])
316
empty_text = (b'a', [])
317
sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
318
sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
316
320
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
317
321
sha, _, _ = vf.add_lines(version, [], lines)
319
323
# we now have a copy of all the lines in the vf.
320
324
# is the test applicable to this vf implementation?
322
vf.add_lines_with_ghosts('d', [], [])
326
vf.add_lines_with_ghosts(b'd', [], [])
323
327
except NotImplementedError:
324
328
raise TestSkipped("add_lines_with_ghosts is optional")
325
329
for sha, (version, lines) in zip(
326
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
330
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
327
331
self.assertRaises(errors.ExistingContent,
328
vf.add_lines_with_ghosts, version + "2", [], lines,
332
vf.add_lines_with_ghosts, version + b"2", [], lines,
330
334
# and no new version should have been added.
331
335
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
334
338
def test_add_lines_return_value(self):
335
339
# add_lines should return the sha1 and the text size.
336
340
vf = self.get_file()
337
empty_text = ('a', [])
338
sample_text_nl = ('b', ["foo\n", "bar\n"])
339
sample_text_no_nl = ('c', ["foo\n", "bar"])
341
empty_text = (b'a', [])
342
sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
343
sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
340
344
# check results for the three cases:
341
345
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
342
346
# the first two elements are the same for all versioned files:
421
425
# Test adding this in two situations:
422
426
# On top of a new insertion
423
427
vf = self.get_file('fulltext')
424
vf.add_lines('noeol', [], ['line'])
425
vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
426
left_matching_blocks=[(0, 1, 1)])
427
self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
428
vf.add_lines(b'noeol', [], [b'line'])
429
vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
430
left_matching_blocks=[(0, 1, 1)])
431
self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
428
432
# On top of a delta
429
433
vf = self.get_file('delta')
430
vf.add_lines('base', [], ['line'])
431
vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
432
vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
433
left_matching_blocks=[(1, 1, 1)])
434
self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
434
vf.add_lines(b'base', [], [b'line'])
435
vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
436
vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
437
left_matching_blocks=[(1, 1, 1)])
438
self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
436
440
def test_make_mpdiffs(self):
437
441
from breezy import multiparent
448
452
def test_make_mpdiffs_with_ghosts(self):
449
453
vf = self.get_file('foo')
451
vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
455
vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
452
456
except NotImplementedError:
453
457
# old Weave formats do not allow ghosts
455
self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
459
self.assertRaises(errors.RevisionNotPresent,
460
vf.make_mpdiffs, [b'ghost'])
457
462
def _setup_for_deltas(self, f):
458
463
self.assertFalse(f.has_version('base'))
459
464
# add texts that should trip the knit maximum delta chain threshold
460
465
# as well as doing parallel chains of data in knits.
461
466
# this is done by two chains of 25 insertions
462
f.add_lines('base', [], ['line\n'])
463
f.add_lines('noeol', ['base'], ['line'])
467
f.add_lines(b'base', [], [b'line\n'])
468
f.add_lines(b'noeol', [b'base'], [b'line'])
464
469
# detailed eol tests:
465
470
# shared last line with parent no-eol
466
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
471
f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
467
472
# differing last line with parent, both no-eol
468
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
473
f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
469
474
# add eol following a noneol parent, change content
470
f.add_lines('eol', ['noeol'], ['phone\n'])
475
f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
471
476
# add eol following a noneol parent, no change content
472
f.add_lines('eolline', ['noeol'], ['line\n'])
477
f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
473
478
# noeol with no parents:
474
f.add_lines('noeolbase', [], ['line'])
479
f.add_lines(b'noeolbase', [], [b'line'])
475
480
# noeol preceeding its leftmost parent in the output:
476
481
# this is done by making it a merge of two parents with no common
477
482
# anestry: noeolbase and noeol with the
478
483
# later-inserted parent the leftmost.
479
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
484
f.add_lines(b'eolbeforefirstparent', [
485
b'noeolbase', b'noeol'], [b'line'])
480
486
# two identical eol texts
481
f.add_lines('noeoldup', ['noeol'], ['line'])
483
text_name = 'chain1-'
485
sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
486
1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
487
2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
488
3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
489
4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
490
5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
491
6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
492
7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
493
8: '779e9a0b28f9f832528d4b21e17e168c67697272',
494
9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
495
10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
496
11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
497
12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
498
13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
499
14: '2c4b1736566b8ca6051e668de68650686a3922f2',
500
15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
501
16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
502
17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
503
18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
504
19: '1ebed371807ba5935958ad0884595126e8c4e823',
505
20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
506
21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
507
22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
508
23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
509
24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
510
25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
487
f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
488
next_parent = b'base'
489
text_name = b'chain1-'
491
sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
492
1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
493
2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
494
3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
495
4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
496
5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
497
6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
498
7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
499
8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
500
9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
501
10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
502
11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
503
12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
504
13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
505
14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
506
15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
507
16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
508
17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
509
18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
510
19: b'1ebed371807ba5935958ad0884595126e8c4e823',
511
20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
512
21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
513
22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
514
23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
515
24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
516
25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
512
518
for depth in range(26):
513
new_version = text_name + '%s' % depth
514
text = text + ['line\n']
519
new_version = text_name + b'%d' % depth
520
text = text + [b'line\n']
515
521
f.add_lines(new_version, [next_parent], text)
516
522
next_parent = new_version
518
text_name = 'chain2-'
523
next_parent = b'base'
524
text_name = b'chain2-'
520
526
for depth in range(26):
521
new_version = text_name + '%s' % depth
522
text = text + ['line\n']
527
new_version = text_name + b'%d' % depth
528
text = text + [b'line\n']
523
529
f.add_lines(new_version, [next_parent], text)
524
530
next_parent = new_version
527
533
def test_ancestry(self):
528
534
f = self.get_file()
529
535
self.assertEqual([], f.get_ancestry([]))
530
f.add_lines('r0', [], ['a\n', 'b\n'])
531
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
532
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
533
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
534
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
536
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
537
f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
538
f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
539
f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
540
f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
535
541
self.assertEqual([], f.get_ancestry([]))
536
versions = f.get_ancestry(['rM'])
542
versions = f.get_ancestry([b'rM'])
537
543
# there are some possibilities:
541
547
# so we check indexes
542
r0 = versions.index('r0')
543
r1 = versions.index('r1')
544
r2 = versions.index('r2')
545
self.assertFalse('r3' in versions)
546
rM = versions.index('rM')
548
r0 = versions.index(b'r0')
549
r1 = versions.index(b'r1')
550
r2 = versions.index(b'r2')
551
self.assertFalse(b'r3' in versions)
552
rM = versions.index(b'rM')
547
553
self.assertTrue(r0 < r1)
548
554
self.assertTrue(r0 < r2)
549
555
self.assertTrue(r1 < rM)
550
556
self.assertTrue(r2 < rM)
552
558
self.assertRaises(RevisionNotPresent,
553
f.get_ancestry, ['rM', 'rX'])
559
f.get_ancestry, [b'rM', b'rX'])
555
self.assertEqual(set(f.get_ancestry('rM')),
556
set(f.get_ancestry('rM', topo_sorted=False)))
561
self.assertEqual(set(f.get_ancestry(b'rM')),
562
set(f.get_ancestry(b'rM', topo_sorted=False)))
558
564
def test_mutate_after_finish(self):
559
565
self._transaction = 'before'
560
566
f = self.get_file()
561
567
self._transaction = 'after'
562
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
563
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
568
self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
569
self.assertRaises(errors.OutSideTransaction,
570
f.add_lines_with_ghosts, b'', [], [])
565
572
def test_copy_to(self):
566
573
f = self.get_file()
567
f.add_lines('0', [], ['a\n'])
574
f.add_lines(b'0', [], [b'a\n'])
568
575
t = MemoryTransport()
569
576
f.copy_to('foo', t)
570
577
for suffix in self.get_factory().get_suffixes():
578
585
def test_get_parent_map(self):
579
586
f = self.get_file()
580
f.add_lines('r0', [], ['a\n', 'b\n'])
582
{'r0':()}, f.get_parent_map(['r0']))
583
f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
585
{'r1':('r0',)}, f.get_parent_map(['r1']))
589
f.get_parent_map(['r0', 'r1']))
590
f.add_lines('r2', [], ['a\n', 'b\n'])
591
f.add_lines('r3', [], ['a\n', 'b\n'])
592
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
594
{'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
595
self.assertEqual({}, f.get_parent_map('y'))
599
f.get_parent_map(['r0', 'y', 'r1']))
587
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
589
{b'r0': ()}, f.get_parent_map([b'r0']))
590
f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
592
{b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
596
f.get_parent_map([b'r0', b'r1']))
597
f.add_lines(b'r2', [], [b'a\n', b'b\n'])
598
f.add_lines(b'r3', [], [b'a\n', b'b\n'])
599
f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
601
{b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
602
self.assertEqual({}, f.get_parent_map(b'y'))
606
f.get_parent_map([b'r0', b'y', b'r1']))
601
608
def test_annotate(self):
602
609
f = self.get_file()
603
f.add_lines('r0', [], ['a\n', 'b\n'])
604
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
605
origins = f.annotate('r1')
606
self.assertEqual(origins[0][0], 'r1')
607
self.assertEqual(origins[1][0], 'r0')
610
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
611
f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
612
origins = f.annotate(b'r1')
613
self.assertEqual(origins[0][0], b'r1')
614
self.assertEqual(origins[1][0], b'r0')
609
616
self.assertRaises(RevisionNotPresent,
612
619
def test_detection(self):
613
620
# Test weaves detect corruption.
656
663
vf = self.get_file()
657
664
# add a base to get included
658
vf.add_lines('base', [], ['base\n'])
665
vf.add_lines(b'base', [], [b'base\n'])
659
666
# add a ancestor to be included on one side
660
vf.add_lines('lancestor', [], ['lancestor\n'])
667
vf.add_lines(b'lancestor', [], [b'lancestor\n'])
661
668
# add a ancestor to be included on the other side
662
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
669
vf.add_lines(b'rancestor', [b'base'], [b'rancestor\n'])
663
670
# add a child of rancestor with no eofile-nl
664
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
671
vf.add_lines(b'child', [b'rancestor'], [b'base\n', b'child\n'])
665
672
# add a child of lancestor and base to join the two roots
666
vf.add_lines('otherchild',
667
['lancestor', 'base'],
668
['base\n', 'lancestor\n', 'otherchild\n'])
673
vf.add_lines(b'otherchild',
674
[b'lancestor', b'base'],
675
[b'base\n', b'lancestor\n', b'otherchild\n'])
669
677
def iter_with_versions(versions, expected):
670
678
# now we need to see what lines are returned, and how often.
672
680
progress = InstrumentedProgress()
673
681
# iterate over the lines
674
682
for line in vf.iter_lines_added_or_present_in_versions(versions,
676
684
lines.setdefault(line, 0)
678
if []!= progress.updates:
686
if [] != progress.updates:
679
687
self.assertEqual(expected, progress.updates)
681
lines = iter_with_versions(['child', 'otherchild'],
689
lines = iter_with_versions([b'child', b'otherchild'],
682
690
[('Walking content', 0, 2),
683
691
('Walking content', 1, 2),
684
692
('Walking content', 2, 2)])
685
693
# we must see child and otherchild
686
self.assertTrue(lines[('child\n', 'child')] > 0)
687
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
694
self.assertTrue(lines[(b'child\n', b'child')] > 0)
695
self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
688
696
# we dont care if we got more than that.
712
720
parent_id_unicode = u'b\xbfse'
713
721
parent_id_utf8 = parent_id_unicode.encode('utf8')
715
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
723
vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
716
724
except NotImplementedError:
717
725
# check the other ghost apis are also not implemented
718
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
719
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
726
self.assertRaises(NotImplementedError,
727
vf.get_ancestry_with_ghosts, [b'foo'])
728
self.assertRaises(NotImplementedError,
729
vf.get_parents_with_ghosts, b'foo')
721
731
vf = self.reopen_file()
722
732
# test key graph related apis: getncestry, _graph, get_parents
724
734
# - these are ghost unaware and must not be reflect ghosts
725
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
735
self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
726
736
self.assertFalse(vf.has_version(parent_id_utf8))
727
737
# we have _with_ghost apis to give us ghost information.
728
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
729
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
738
self.assertEqual([parent_id_utf8, b'notbxbfse'],
739
vf.get_ancestry_with_ghosts([b'notbxbfse']))
740
self.assertEqual([parent_id_utf8],
741
vf.get_parents_with_ghosts(b'notbxbfse'))
730
742
# if we add something that is a ghost of another, it should correct the
731
743
# results of the prior apis
732
744
vf.add_lines(parent_id_utf8, [], [])
733
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
734
self.assertEqual({'notbxbfse':(parent_id_utf8,)},
735
vf.get_parent_map(['notbxbfse']))
745
self.assertEqual([parent_id_utf8, b'notbxbfse'],
746
vf.get_ancestry([b'notbxbfse']))
747
self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
748
vf.get_parent_map([b'notbxbfse']))
736
749
self.assertTrue(vf.has_version(parent_id_utf8))
737
750
# we have _with_ghost apis to give us ghost information.
738
self.assertEqual([parent_id_utf8, 'notbxbfse'],
739
vf.get_ancestry_with_ghosts(['notbxbfse']))
740
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
751
self.assertEqual([parent_id_utf8, b'notbxbfse'],
752
vf.get_ancestry_with_ghosts([b'notbxbfse']))
753
self.assertEqual([parent_id_utf8],
754
vf.get_parents_with_ghosts(b'notbxbfse'))
742
756
def test_add_lines_with_ghosts_after_normal_revs(self):
743
757
# some versioned file formats allow lines to be added with parent
747
761
vf = self.get_file()
748
762
# probe for ghost support
750
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
764
vf.add_lines_with_ghosts(b'base', [], [b'line\n', b'line_b\n'])
751
765
except NotImplementedError:
753
vf.add_lines_with_ghosts('references_ghost',
755
['line\n', 'line_b\n', 'line_c\n'])
756
origins = vf.annotate('references_ghost')
757
self.assertEqual(('base', 'line\n'), origins[0])
758
self.assertEqual(('base', 'line_b\n'), origins[1])
759
self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
767
vf.add_lines_with_ghosts(b'references_ghost',
768
[b'base', b'a_ghost'],
769
[b'line\n', b'line_b\n', b'line_c\n'])
770
origins = vf.annotate(b'references_ghost')
771
self.assertEqual((b'base', b'line\n'), origins[0])
772
self.assertEqual((b'base', b'line_b\n'), origins[1])
773
self.assertEqual((b'references_ghost', b'line_c\n'), origins[2])
761
775
def test_readonly_mode(self):
762
776
t = self.get_transport()
763
777
factory = self.get_factory()
764
778
vf = factory('id', t, 0o777, create=True, access_mode='w')
765
779
vf = factory('id', t, access_mode='r')
766
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
780
self.assertRaises(errors.ReadOnlyError, vf.add_lines, b'base', [], [])
767
781
self.assertRaises(errors.ReadOnlyError,
768
782
vf.add_lines_with_ghosts,
798
812
w = WeaveFile('foo', self.get_transport(),
800
814
get_scope=self.get_transaction)
801
w.add_lines('v1', [], ['hello\n'])
802
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
815
w.add_lines(b'v1', [], [b'hello\n'])
816
w.add_lines(b'v2', [b'v1'], [b'hello\n', b'there\n'])
804
818
# We are going to invasively corrupt the text
805
819
# Make sure the internals of weave are the same
806
self.assertEqual([('{', 0)
820
self.assertEqual([(b'{', 0), b'hello\n', (b'}', None), (b'{', 1), b'there\n', (b'}', None)
814
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
815
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
823
self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f', b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
820
w._weave[4] = 'There\n'
828
w._weave[4] = b'There\n'
823
831
def get_file_corrupted_checksum(self):
824
832
w = self.get_file_corrupted_text()
826
w._weave[4] = 'there\n'
827
self.assertEqual('hello\nthere\n', w.get_text('v2'))
834
w._weave[4] = b'there\n'
835
self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))
829
#Invalid checksum, first digit changed
830
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
837
# Invalid checksum, first digit changed
838
w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
833
841
def reopen_file(self, name='foo', create=False):
858
866
self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])
860
868
def test_add_lines(self):
861
self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
862
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
863
('root', 'a'), [], [])
864
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
865
('root', 'a:'), None, [])
866
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
867
('root', 'a:'), [], None)
869
self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
870
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
871
(b'root', b'a'), [], [])
872
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
873
(b'root', b'a:'), None, [])
874
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
875
(b'root', b'a:'), [], None)
869
877
def setup_abcde(self):
870
self.vf1.add_lines(('root', 'A'), [], ['a'])
871
self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
872
self.vf2.add_lines(('root', 'C'), [], ['c'])
873
self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
874
self.plan_merge_vf.add_lines(('root', 'E:'),
875
[('root', 'B'), ('root', 'D')], ['e'])
878
self.vf1.add_lines((b'root', b'A'), [], [b'a'])
879
self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
880
self.vf2.add_lines((b'root', b'C'), [], [b'c'])
881
self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
882
self.plan_merge_vf.add_lines((b'root', b'E:'),
883
[(b'root', b'B'), (b'root', b'D')], [b'e'])
877
885
def test_get_parents(self):
878
886
self.setup_abcde()
879
self.assertEqual({('root', 'B'):(('root', 'A'),)},
880
self.plan_merge_vf.get_parent_map([('root', 'B')]))
881
self.assertEqual({('root', 'D'):(('root', 'C'),)},
882
self.plan_merge_vf.get_parent_map([('root', 'D')]))
883
self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
884
self.plan_merge_vf.get_parent_map([('root', 'E:')]))
887
self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
888
self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
889
self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
890
self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
891
self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
892
self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
885
893
self.assertEqual({},
886
self.plan_merge_vf.get_parent_map([('root', 'F')]))
894
self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
887
895
self.assertEqual({
888
('root', 'B'): (('root', 'A'),),
889
('root', 'D'): (('root', 'C'),),
890
('root', 'E:'): (('root', 'B'), ('root', 'D')),
896
(b'root', b'B'): ((b'root', b'A'),),
897
(b'root', b'D'): ((b'root', b'C'),),
898
(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
892
900
self.plan_merge_vf.get_parent_map(
893
[('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))
901
[(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))
895
903
def test_get_record_stream(self):
896
904
self.setup_abcde()
897
906
def get_record(suffix):
898
907
return next(self.plan_merge_vf.get_record_stream(
899
[('root', suffix)], 'unordered', True))
900
self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
901
self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
902
self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
908
[(b'root', suffix)], 'unordered', True))
909
self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
910
self.assertEqual(b'a', b''.join(get_record(b'A').iter_bytes_as('chunked')))
911
self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
912
self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
903
913
self.assertEqual('absent', get_record('F').storage_kind)
971
981
mp = list(map(addcrlf, mp))
972
982
self.assertEqual(mt.readlines(), mp)
975
984
def testOneInsert(self):
981
990
def testSeparateInserts(self):
982
self.doMerge(['aaa', 'bbb', 'ccc'],
983
['aaa', 'xxx', 'bbb', 'ccc'],
984
['aaa', 'bbb', 'yyy', 'ccc'],
985
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
991
self.doMerge([b'aaa', b'bbb', b'ccc'],
992
[b'aaa', b'xxx', b'bbb', b'ccc'],
993
[b'aaa', b'bbb', b'yyy', b'ccc'],
994
[b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
987
996
def testSameInsert(self):
988
self.doMerge(['aaa', 'bbb', 'ccc'],
989
['aaa', 'xxx', 'bbb', 'ccc'],
990
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
991
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
992
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
997
self.doMerge([b'aaa', b'bbb', b'ccc'],
998
[b'aaa', b'xxx', b'bbb', b'ccc'],
999
[b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
1000
[b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
1001
overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']
993
1003
def testOverlappedInsert(self):
994
self.doMerge(['aaa', 'bbb'],
995
['aaa', 'xxx', 'yyy', 'bbb'],
996
['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)
1004
self.doMerge([b'aaa', b'bbb'],
1005
[b'aaa', b'xxx', b'yyy', b'bbb'],
1006
[b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)
998
1008
# really it ought to reduce this to
999
# ['aaa', 'xxx', 'yyy', 'bbb']
1009
# [b'aaa', b'xxx', b'yyy', b'bbb']
1002
1011
def testClashReplace(self):
1003
self.doMerge(['aaa'],
1006
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
1012
self.doMerge([b'aaa'],
1015
[b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
1009
1018
def testNonClashInsert1(self):
1010
self.doMerge(['aaa'],
1013
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
1019
self.doMerge([b'aaa'],
1022
[b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',
1016
1025
def testNonClashInsert2(self):
1017
self.doMerge(['aaa'],
1026
self.doMerge([b'aaa'],
1023
1031
def testDeleteAndModify(self):
1024
1032
"""Clashing delete and modification.
1031
1039
# skippd, not working yet
1034
self.doMerge(['aaa', 'bbb', 'ccc'],
1035
['aaa', 'ddd', 'ccc'],
1037
['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1042
self.doMerge([b'aaa', b'bbb', b'ccc'],
1043
[b'aaa', b'ddd', b'ccc'],
1045
[b'<<<<<<<< ', b'aaa', b'=======', b'>>>>>>> ', b'ccc'])
1039
1047
def _test_merge_from_strings(self, base, a, b, expected):
1040
1048
w = self.get_file()
1041
w.add_lines('text0', [], base.splitlines(True))
1042
w.add_lines('text1', ['text0'], a.splitlines(True))
1043
w.add_lines('text2', ['text0'], b.splitlines(True))
1049
w.add_lines(b'text0', [], base.splitlines(True))
1050
w.add_lines(b'text1', [b'text0'], a.splitlines(True))
1051
w.add_lines(b'text2', [b'text0'], b.splitlines(True))
1044
1052
self.log('merge plan:')
1045
p = list(w.plan_merge('text1', 'text2'))
1053
p = list(w.plan_merge(b'text1', b'text2'))
1046
1054
for state, line in p:
1048
1056
self.log('%12s | %s' % (state, line[:-1]))
1049
1057
self.log('merge result:')
1050
result_text = ''.join(w.weave_merge(p))
1058
result_text = b''.join(w.weave_merge(p))
1051
1059
self.log(result_text)
1052
1060
self.assertEqualDiff(result_text, expected)
1054
1062
def test_weave_merge_conflicts(self):
1055
1063
# does weave merge properly handle plans that end with unchanged?
1056
result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
1057
self.assertEqual(result, 'hello\n')
1064
result = b''.join(self.get_file().weave_merge([('new-a', b'hello\n')]))
1065
self.assertEqual(result, b'hello\n')
1059
1067
def test_deletion_extended(self):
1060
1068
"""One side deletes, the other deletes more.
1351
1367
def test_identity_mapper(self):
1352
1368
mapper = versionedfile.ConstantMapper("inventory")
1353
self.assertEqual("inventory", mapper.map(('foo@ar',)))
1354
self.assertEqual("inventory", mapper.map(('quux',)))
1369
self.assertEqual("inventory", mapper.map((b'foo@ar',)))
1370
self.assertEqual("inventory", mapper.map((b'quux',)))
1356
1372
def test_prefix_mapper(self):
1357
1373
#format5: plain
1358
1374
mapper = versionedfile.PrefixMapper()
1359
self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
1360
self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
1361
self.assertEqual(('file-id',), mapper.unmap("file-id"))
1362
self.assertEqual(('new-id',), mapper.unmap("new-id"))
1375
self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
1376
self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
1377
self.assertEqual((b'file-id',), mapper.unmap("file-id"))
1378
self.assertEqual((b'new-id',), mapper.unmap("new-id"))
1364
1380
def test_hash_prefix_mapper(self):
1365
1381
#format6: hash + plain
1366
1382
mapper = versionedfile.HashPrefixMapper()
1367
self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
1368
self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
1369
self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
1370
self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
1384
"9b/file-id", mapper.map((b"file-id", b"revision-id")))
1385
self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
1386
self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
1387
self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))
1372
1389
def test_hash_escaped_mapper(self):
1373
1390
#knit1: hash + escaped
1374
1391
mapper = versionedfile.HashEscapedPrefixMapper()
1375
self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
1376
self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
1378
self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
1380
self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
1381
self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
1392
self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
1393
self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
1395
self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
1397
self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
1398
self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
1384
1401
class TestVersionedFiles(TestCaseWithMemoryTransport):
1495
1512
raise TestNotApplicable("%s doesn't support fallbacks"
1496
1513
% (f.__class__.__name__,))
1497
1514
g = self.get_versionedfiles('fallback')
1498
key_a = self.get_simple_key('a')
1499
g.add_lines(key_a, [], ['\n'])
1515
key_a = self.get_simple_key(b'a')
1516
g.add_lines(key_a, [], [b'\n'])
1500
1517
f.add_fallback_versioned_files(g)
1501
1518
self.assertTrue(key_a in f.get_parent_map([key_a]))
1502
self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
1520
key_a in f.without_fallbacks().get_parent_map([key_a]))
1504
1522
def test_add_lines(self):
1505
1523
f = self.get_versionedfiles()
1506
key0 = self.get_simple_key('r0')
1507
key1 = self.get_simple_key('r1')
1508
key2 = self.get_simple_key('r2')
1509
keyf = self.get_simple_key('foo')
1510
f.add_lines(key0, [], ['a\n', 'b\n'])
1524
key0 = self.get_simple_key(b'r0')
1525
key1 = self.get_simple_key(b'r1')
1526
key2 = self.get_simple_key(b'r2')
1527
keyf = self.get_simple_key(b'foo')
1528
f.add_lines(key0, [], [b'a\n', b'b\n'])
1512
f.add_lines(key1, [key0], ['b\n', 'c\n'])
1530
f.add_lines(key1, [key0], [b'b\n', b'c\n'])
1514
f.add_lines(key1, [], ['b\n', 'c\n'])
1532
f.add_lines(key1, [], [b'b\n', b'c\n'])
1515
1533
keys = f.keys()
1516
1534
self.assertTrue(key0 in keys)
1517
1535
self.assertTrue(key1 in keys)
1519
1537
for record in f.get_record_stream([key0, key1], 'unordered', True):
1520
1538
records.append((record.key, record.get_bytes_as('fulltext')))
1522
self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
1540
self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
1542
def test_add_chunks(self):
1543
f = self.get_versionedfiles()
1544
key0 = self.get_simple_key(b'r0')
1545
key1 = self.get_simple_key(b'r1')
1546
key2 = self.get_simple_key(b'r2')
1547
keyf = self.get_simple_key(b'foo')
1548
def add_chunks(key, parents, chunks):
1549
factory = ChunkedContentFactory(
1550
key, parents, osutils.sha_strings(chunks), chunks)
1551
return f.add_content(factory)
1553
add_chunks(key0, [], [b'a', b'\nb\n'])
1555
add_chunks(key1, [key0], [b'b', b'\n', b'c\n'])
1557
add_chunks(key1, [], [b'b\n', b'c\n'])
1559
self.assertIn(key0, keys)
1560
self.assertIn(key1, keys)
1562
for record in f.get_record_stream([key0, key1], 'unordered', True):
1563
records.append((record.key, record.get_bytes_as('fulltext')))
1565
self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
1524
1567
def test_annotate(self):
1525
1568
files = self.get_versionedfiles()
1527
1570
if self.key_length == 1:
1573
prefix = (b'FileA',)
1531
1574
# introduced full text
1532
origins = files.annotate(prefix + ('origin',))
1575
origins = files.annotate(prefix + (b'origin',))
1533
1576
self.assertEqual([
1534
(prefix + ('origin',), 'origin\n')],
1577
(prefix + (b'origin',), b'origin\n')],
1537
origins = files.annotate(prefix + ('base',))
1580
origins = files.annotate(prefix + (b'base',))
1538
1581
self.assertEqual([
1539
(prefix + ('base',), 'base\n')],
1582
(prefix + (b'base',), b'base\n')],
1542
origins = files.annotate(prefix + ('merged',))
1585
origins = files.annotate(prefix + (b'merged',))
1544
1587
self.assertEqual([
1545
(prefix + ('base',), 'base\n'),
1546
(prefix + ('left',), 'left\n'),
1547
(prefix + ('right',), 'right\n'),
1548
(prefix + ('merged',), 'merged\n')
1588
(prefix + (b'base',), b'base\n'),
1589
(prefix + (b'left',), b'left\n'),
1590
(prefix + (b'right',), b'right\n'),
1591
(prefix + (b'merged',), b'merged\n')
1552
1595
# Without a graph everything is new.
1553
1596
self.assertEqual([
1554
(prefix + ('merged',), 'base\n'),
1555
(prefix + ('merged',), 'left\n'),
1556
(prefix + ('merged',), 'right\n'),
1557
(prefix + ('merged',), 'merged\n')
1597
(prefix + (b'merged',), b'base\n'),
1598
(prefix + (b'merged',), b'left\n'),
1599
(prefix + (b'merged',), b'right\n'),
1600
(prefix + (b'merged',), b'merged\n')
1560
1603
self.assertRaises(RevisionNotPresent,
1561
files.annotate, prefix + ('missing-key',))
1604
files.annotate, prefix + ('missing-key',))
1563
1606
def test_check_no_parameters(self):
1564
1607
files = self.get_versionedfiles()
1639
1682
results.append(add[:2])
1640
1683
if self.key_length == 1:
1641
1684
self.assertEqual([
1642
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1643
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1644
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1645
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1646
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1685
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1686
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1687
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1688
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1689
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1648
1691
elif self.key_length == 2:
1649
1692
self.assertEqual([
1650
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1651
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1652
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1653
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1654
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1655
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1656
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1657
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1658
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1659
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1693
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1694
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1695
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1696
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1697
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1698
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1699
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1700
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1701
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1702
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1662
1705
def test_add_lines_no_key_generates_chk_key(self):
1670
1713
results.append(add[:2])
1671
1714
if self.key_length == 1:
1672
1715
self.assertEqual([
1673
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1674
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1675
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1676
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1677
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1716
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1717
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1718
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1719
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1720
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1679
1722
# Check the added items got CHK keys.
1680
1723
self.assertEqual({
1681
('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
1682
('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
1683
('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
1684
('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
1685
('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
1724
(b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
1725
(b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
1726
(b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
1727
(b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
1728
(b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
1688
1731
elif self.key_length == 2:
1689
1732
self.assertEqual([
1690
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1691
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1692
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1693
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1694
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1695
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1696
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1697
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1698
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1699
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1733
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1734
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1735
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1736
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1737
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1738
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1739
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1740
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1741
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1742
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1701
1744
# Check the added items got CHK keys.
1702
1745
self.assertEqual({
1703
('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1704
('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1705
('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1706
('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1707
('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1708
('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1709
('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1710
('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1711
('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1712
('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1746
(b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1747
(b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1748
(b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1749
(b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1750
(b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1751
(b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1752
(b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1753
(b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1754
(b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1755
(b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1716
1759
def test_empty_lines(self):
1717
1760
"""Empty files can be stored."""
1718
1761
f = self.get_versionedfiles()
1719
key_a = self.get_simple_key('a')
1762
key_a = self.get_simple_key(b'a')
1720
1763
f.add_lines(key_a, [], [])
1721
self.assertEqual('',
1722
f.get_record_stream([key_a], 'unordered', True
1723
).next().get_bytes_as('fulltext'))
1724
key_b = self.get_simple_key('b')
1764
self.assertEqual(b'',
1765
next(f.get_record_stream([key_a], 'unordered', True
1766
)).get_bytes_as('fulltext'))
1767
key_b = self.get_simple_key(b'b')
1725
1768
f.add_lines(key_b, self.get_parents([key_a]), [])
1726
self.assertEqual('',
1727
f.get_record_stream([key_b], 'unordered', True
1728
).next().get_bytes_as('fulltext'))
1769
self.assertEqual(b'',
1770
next(f.get_record_stream([key_b], 'unordered', True
1771
)).get_bytes_as('fulltext'))
1730
1773
def test_newline_only(self):
1731
1774
f = self.get_versionedfiles()
1732
key_a = self.get_simple_key('a')
1733
f.add_lines(key_a, [], ['\n'])
1734
self.assertEqual('\n',
1735
f.get_record_stream([key_a], 'unordered', True
1736
).next().get_bytes_as('fulltext'))
1737
key_b = self.get_simple_key('b')
1738
f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
1739
self.assertEqual('\n',
1740
f.get_record_stream([key_b], 'unordered', True
1741
).next().get_bytes_as('fulltext'))
1775
key_a = self.get_simple_key(b'a')
1776
f.add_lines(key_a, [], [b'\n'])
1777
self.assertEqual(b'\n',
1778
next(f.get_record_stream([key_a], 'unordered', True
1779
)).get_bytes_as('fulltext'))
1780
key_b = self.get_simple_key(b'b')
1781
f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
1782
self.assertEqual(b'\n',
1783
next(f.get_record_stream([key_b], 'unordered', True
1784
)).get_bytes_as('fulltext'))
1743
1786
def test_get_known_graph_ancestry(self):
1744
1787
f = self.get_versionedfiles()
1745
1788
if not self.graph:
1746
1789
raise TestNotApplicable('ancestry info only relevant with graph.')
1747
key_a = self.get_simple_key('a')
1748
key_b = self.get_simple_key('b')
1749
key_c = self.get_simple_key('c')
1790
key_a = self.get_simple_key(b'a')
1791
key_b = self.get_simple_key(b'b')
1792
key_c = self.get_simple_key(b'c')
1755
f.add_lines(key_a, [], ['\n'])
1756
f.add_lines(key_b, [key_a], ['\n'])
1757
f.add_lines(key_c, [key_a, key_b], ['\n'])
1798
f.add_lines(key_a, [], [b'\n'])
1799
f.add_lines(key_b, [key_a], [b'\n'])
1800
f.add_lines(key_c, [key_a, key_b], [b'\n'])
1758
1801
kg = f.get_known_graph_ancestry([key_c])
1759
1802
self.assertIsInstance(kg, _mod_graph.KnownGraph)
1760
1803
self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1791
1834
def assertValidStorageKind(self, storage_kind):
1792
1835
"""Assert that storage_kind is a valid storage_kind."""
1793
1836
self.assertSubset([storage_kind],
1794
['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
1795
'knit-ft', 'knit-delta', 'chunked', 'fulltext',
1796
'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
1798
'knit-delta-closure', 'knit-delta-closure-ref',
1799
'groupcompress-block', 'groupcompress-block-ref'])
1837
['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
1838
'knit-ft', 'knit-delta', 'chunked', 'fulltext',
1839
'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
1841
'knit-delta-closure', 'knit-delta-closure-ref',
1842
'groupcompress-block', 'groupcompress-block-ref'])
1801
1844
def capture_stream(self, f, entries, on_seen, parents,
1802
require_fulltext=False):
1845
require_fulltext=False):
1803
1846
"""Capture a stream for testing."""
1804
1847
for factory in entries:
1805
1848
on_seen(factory.key)
1806
1849
self.assertValidStorageKind(factory.storage_kind)
1807
1850
if factory.sha1 is not None:
1808
1851
self.assertEqual(f.get_sha1s([factory.key])[factory.key],
1810
1853
self.assertEqual(parents[factory.key], factory.parents)
1811
1854
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1813
1856
if require_fulltext:
1814
1857
factory.get_bytes_as('fulltext')
1827
1870
def get_keys_and_sort_order(self):
1828
1871
"""Get diamond test keys list, and their sort ordering."""
1829
1872
if self.key_length == 1:
1830
keys = [('merged',), ('left',), ('right',), ('base',)]
1831
sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
1873
keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
1874
sort_order = {(b'merged',): 2, (b'left',): 1,
1875
(b'right',): 1, (b'base',): 0}
1834
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1836
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1878
(b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
1879
(b'FileA', b'base'),
1880
(b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
1881
(b'FileB', b'base'),
1840
('FileA', 'merged'): 2, ('FileA', 'left'): 1, ('FileA', 'right'): 1,
1841
('FileA', 'base'): 0,
1842
('FileB', 'merged'): 2, ('FileB', 'left'): 1, ('FileB', 'right'): 1,
1843
('FileB', 'base'): 0,
1884
(b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
1885
(b'FileA', b'base'): 0,
1886
(b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
1887
(b'FileB', b'base'): 0,
1845
1889
return keys, sort_order
1847
1891
def get_keys_and_groupcompress_sort_order(self):
1848
1892
"""Get diamond test keys list, and their groupcompress sort ordering."""
1849
1893
if self.key_length == 1:
1850
keys = [('merged',), ('left',), ('right',), ('base',)]
1851
sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
1894
keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
1895
sort_order = {(b'merged',): 0, (b'left',): 1,
1896
(b'right',): 1, (b'base',): 2}
1854
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1856
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1899
(b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
1900
(b'FileA', b'base'),
1901
(b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
1902
(b'FileB', b'base'),
1860
('FileA', 'merged'): 0, ('FileA', 'left'): 1, ('FileA', 'right'): 1,
1861
('FileA', 'base'): 2,
1862
('FileB', 'merged'): 3, ('FileB', 'left'): 4, ('FileB', 'right'): 4,
1863
('FileB', 'base'): 5,
1905
(b'FileA', b'merged'): 0, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
1906
(b'FileA', b'base'): 2,
1907
(b'FileB', b'merged'): 3, (b'FileB', b'left'): 4, (b'FileB', b'right'): 4,
1908
(b'FileB', b'base'): 5,
1865
1910
return keys, sort_order
1950
1997
self.assertEqual(parent_map[factory.key], factory.parents)
1951
1998
# currently no stream emits mpdiff
1952
1999
self.assertRaises(errors.UnavailableRepresentation,
1953
factory.get_bytes_as, 'mpdiff')
2000
factory.get_bytes_as, 'mpdiff')
1954
2001
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1956
2003
self.assertEqual(set(keys), seen)
1958
2005
def test_get_record_stream_missing_records_are_absent(self):
1959
2006
files = self.get_versionedfiles()
1960
2007
self.get_diamond_files(files)
1961
2008
if self.key_length == 1:
1962
keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
2009
keys = [(b'merged',), (b'left',), (b'right',),
2010
(b'absent',), (b'base',)]
1965
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1966
('FileA', 'absent'), ('FileA', 'base'),
1967
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1968
('FileB', 'absent'), ('FileB', 'base'),
1969
('absent', 'absent'),
2013
(b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
2014
(b'FileA', b'absent'), (b'FileA', b'base'),
2015
(b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
2016
(b'FileB', b'absent'), (b'FileB', b'base'),
2017
(b'absent', b'absent'),
1971
2019
parent_map = files.get_parent_map(keys)
1972
2020
entries = files.get_record_stream(keys, 'unordered', False)
2228
2279
self.assertRaises(RevisionNotPresent,
2229
files.get_annotator().annotate, self.get_simple_key('missing-key'))
2280
files.get_annotator().annotate, self.get_simple_key(b'missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                ((b'r0',), self.get_parents(())),
                ((b'r1',), self.get_parents(((b'r0',),))),
                ((b'r2',), self.get_parents(())),
                ((b'r3',), self.get_parents(())),
                ((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
                ]
        else:
            parent_details = [
                ((b'FileA', b'r0'), self.get_parents(())),
                ((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
                ((b'FileA', b'r2'), self.get_parents(())),
                ((b'FileA', b'r3'), self.get_parents(())),
                ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
                    (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = list(all_parents.keys())
        if self.key_length == 1:
            keys.insert(1, (b'missing',))
        else:
            keys.insert(1, (b'missing', b'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
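
The contract exercised above: get_parent_map() answers for exactly the keys it knows and silently omits the rest, never raising for absent keys. A dict-backed stand-in (hypothetical, not the breezy implementation) with the same semantics:

    def get_parent_map(index, keys):
        # No KeyError for unknown keys; they simply do not appear.
        return {key: index[key] for key in keys if key in index}

    index = {(b'r0',): (), (b'r1',): ((b'r0',),)}
    assert get_parent_map(index, [(b'r1',), (b'missing',)]) == {
        (b'r1',): ((b'r0',),)}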

        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'base',), (b'origin',), (b'left',),
                    (b'merged',), (b'right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                (b'FileA', b'base'), (b'FileB', b'origin'), (b'FileA', b'left'),
                (b'FileA', b'merged'), (b'FileB', b'right'),
                ]
        self.assertEqual({
            keys[0]: b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: b'00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: b'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: b'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
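
These fixed digests are plain SHA-1s of each version's full text; elsewhere in this file get_sha1s is backed by osutils.sha_strings over the stored lines, which supports that reading. Assuming it holds, the expected values can be reproduced without breezy (the helper name is made up):

    import hashlib

    def sha1_of_lines(lines):
        # Hash the byte lines exactly as stored, newlines included.
        s = hashlib.sha1()
        for line in lines:
            s.update(line)
        return s.hexdigest().encode('ascii')

    # The diamond's 'base' text is b'base\n' when trailing_eol is True.
    assert sha1_of_lines([b'base\n']) == \
        b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'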

        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [(b'origin',)]
            end_keys = [(b'merged',), (b'left',)]
            start_keys = [(b'right',), (b'base',)]
        else:
            origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
            end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
                        (b'FileB', b'merged',), (b'FileB', b'left',)]
            start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
                          (b'FileB', b'right',), (b'FileB', b'base',)]
        origin_entries = source.get_record_stream(
            origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(
            start_keys, 'topological', False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        files.insert_record_stream(entries)
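
The chaining matters: origin arrives first, then end_keys (children) before start_keys (their parents), so insert_record_stream() must cope with records whose parents have not been delivered yet. itertools.chain simply concatenates the three streams lazily, as this tiny stand-alone illustration shows (the byte strings stand in for real records):

    import itertools

    entries = itertools.chain(iter([b'origin']),
                              iter([b'merged', b'left']),
                              iter([b'right', b'base']))
    # Children (merged, left) come off the chain before their parents.
    assert list(entries) == [b'origin', b'merged', b'left', b'right', b'base']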

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key(
            b'lancestor'), (), [b'lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key(b'rancestor'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key(b'child'),
                        self.get_parents([self.get_simple_key(b'rancestor')]),
                        [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key(b'otherchild'),
                        self.get_parents([self.get_simple_key(b'lancestor'),
                                          self.get_simple_key(b'base')]),
                        [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                                                                  pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key(b'child'),
             self.get_simple_key(b'otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
                               [('Walking content', 0, 5),
                                ('Walking content', 1, 5),
                                ('Walking content', 2, 5),
                                ('Walking content', 3, 5),
                                ('Walking content', 4, 5),
                                ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
        self.assertTrue(
            lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
        self.assertTrue(
            lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
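
iter_lines_added_or_present_in_keys() yields (line, key) 2-tuples and may repeat one, which is why iter_with_keys counts occurrences rather than collecting a set. The setdefault-and-increment idiom above is equivalent to collections.Counter:

    from collections import Counter

    pairs = [(b'base\n', (b'child',)), (b'child\n', (b'child',)),
             (b'base\n', (b'otherchild',))]
    counts = Counter(pairs)  # the same tallies the helper builds by hand
    assert counts[(b'child\n', (b'child',))] > 0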

    def test_make_mpdiffs(self):
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
        files.add_lines(self.get_simple_key(b'noeol'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key(b'noeolsecond'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
                        self.get_parents(
                            [self.get_simple_key(b'noeolsecond')]),
                        [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key(b'eol'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key(b'eolline'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
                        self.get_parents([self.get_simple_key(b'noeolbase'),
                                          self.get_simple_key(b'noeol')]),
                        [b'line'])
        # two identical no-eol texts
2651
files.add_lines(self.get_simple_key('noeoldup'),
2652
self.get_parents([self.get_simple_key('noeol')]), ['line'])
2653
next_parent = self.get_simple_key('base')
2654
text_name = 'chain1-'
2656
sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
2657
1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
2658
2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
2659
3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
2660
4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
2661
5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
2662
6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
2663
7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
2664
8: '779e9a0b28f9f832528d4b21e17e168c67697272',
2665
9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
2666
10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
2667
11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
2668
12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
2669
13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
2670
14: '2c4b1736566b8ca6051e668de68650686a3922f2',
2671
15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
2672
16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
2673
17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
2674
18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
2675
19: '1ebed371807ba5935958ad0884595126e8c4e823',
2676
20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
2677
21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
2678
22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
2679
23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
2680
24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
2681
25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
2712
files.add_lines(self.get_simple_key(b'noeoldup'),
2713
self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
2714
next_parent = self.get_simple_key(b'base')
2715
text_name = b'chain1-'
2717
sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
2718
1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
2719
2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
2720
3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
2721
4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
2722
5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
2723
6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
2724
7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
2725
8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
2726
9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
2727
10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
2728
11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
2729
12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
2730
13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
2731
14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
2732
15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
2733
16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
2734
17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
2735
18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
2736
19: b'1ebed371807ba5935958ad0884595126e8c4e823',
2737
20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
2738
21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
2739
22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
2740
23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
2741
24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
2742
25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
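
Each chain1-<depth> text built above is just b'line\n' repeated depth + 2 times (the initial [b'line\n'] plus one line appended per loop pass before add_lines), so the sha1s table should be mechanically reproducible. A sketch under that reading of the loop:

    import hashlib

    def chain1_sha1(depth):
        # depth 0 corresponds to two copies of b'line\n'.
        return hashlib.sha1(
            b'line\n' * (depth + 2)).hexdigest().encode('ascii')

    # If that reading is right, chain1_sha1(0) matches sha1s[0] above.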

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
                         self.texts.get_sha1s([(b"key",)]))

    def test_get_parent_map(self):
        self._parent_map = {b"G": (b"A", b"B")}
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
                         self.texts.get_parent_map([(b"G",), (b"L",)]))

    def test_get_record_stream(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        self._lines[b"B"] = [b"HEY"]
        self._lines[b"C"] = [b"Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
        self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
                         sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
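
Both 'unordered' expectations above fall out of one sort key: known keys sort by their priority value, unknown keys get a default below every assigned priority (sending them to the front), and ties break alphabetically on the key. A sketch of that rule (the helper is hypothetical; the decorator's real code may differ):

    def priority_order(request_keys, priority):
        # Unknown keys default to -1, beating all assigned priorities;
        # equal priorities tie-break by the key itself.
        return sorted(request_keys, key=lambda k: (priority.get(k, -1), k))

    assert priority_order(
        [(b'B',), (b'C',), (b'D',), (b'A',)],
        {(b'B',): 2, (b'D',): 1}) == [(b'A',), (b'C',), (b'D',), (b'B',)]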