    :param trailing_eol: If True end the last line with \n.
        b'base': ((b'origin',),),
        b'left': ((b'base',),),
        b'right': ((b'base',),),
        b'merged': ((b'left',), (b'right',)),
    # insert a diamond graph to exercise deltas and merges.
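    # The 5-node graph built below (parents above children):
    #
    #        origin
    #           |
    #         base
    #         /    \
    #      left   right
    #         \    /
    #        merged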
    f.add_lines(b'origin', [], [b'origin' + last_char])
    f.add_lines(b'base', [b'origin'], [b'base' + last_char])
    f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
    if not left_only:
        f.add_lines(b'right', [b'base'],
                    [b'base\n', b'right' + last_char])
        f.add_lines(b'merged', [b'left', b'right'],
                    [b'base\n', b'left\n', b'right\n', b'merged' + last_char])
    return f, parents
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
                      nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
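    # (get_key and get_parents are helpers defined earlier in this function;
    # they are assumed to adapt a bare suffix to the store's key length and to
    # drop parent lists when nograph is set.)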
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'origin'), (),
                                      [b'origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'base'),
                                      get_parents([(b'origin',)]), [b'base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'left'),
                                      get_parents([(b'base',)]),
                                      [b'base\n', b'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key(b'right'),
                                          get_parents([(b'base',)]),
                                          [b'base\n', b'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key(b'merged'),
                                          get_parents([(b'left',), (b'right',)]),
                                          [b'base\n', b'left\n', b'right\n', b'merged' + last_char]))
    def test_add(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
            self.assertTrue(b'r1' in versions)
            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
            self.assertEqual(f.get_text(b'r0'), b'a\nb\n')
            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                              f.add_lines, b'r2', [b'foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                              f.add_lines, b'r1', [], [])

        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
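        # add_lines returns (sha1, text length, opaque parent-text handle);
        # the handle goes into parent_texts so later adds can delta against it
        # without re-reading the parent from storage.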
        _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        try:
            _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
                [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
                    parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts[b'r0'])
        self.assertNotEqual(None, parent_texts[b'r1'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
            self.assertTrue(b'r1' in versions)
            self.assertTrue(b'r2' in versions)
            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
            self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
            self.assertEqual(3, f.num_versions())
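            # annotate() yields one (introducing-version, line) pair per line,
            # so unchanged lines keep the version that first added them.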
            origins = f.annotate(b'r1')
            self.assertEqual(origins[0][0], b'r0')
            self.assertEqual(origins[1][0], b'r1')
            origins = f.annotate(b'r2')
            self.assertEqual(origins[0][0], b'r1')
            self.assertEqual(origins[1][0], b'r2')

        f = self.reopen_file()
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines(b'1', [], [b'a\n'])
        vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
        vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))
    def test_inline_newline_throws(self):
        # newline characters are only permitted at the end of a line being
        # added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
                          vf.add_lines, b'a', [], [b'a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
        # but inline CRs are allowed
        vf.add_lines(b'a', [], [b'a\r\n'])
        try:
            vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
        except NotImplementedError:
    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])
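        # (version ids ending in ':' are reserved for internal use, hence
        # ReservedId; compare the deliberate (b'root', b'a:') keys accepted by
        # the plan-merge tests further down.)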
    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding matching old content raises."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
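        # nostore_sha asks add_lines to raise ExistingContent, and store
        # nothing, when the sha1 of the text being added matches the supplied
        # value.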
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, version + b"2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")
    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding matching old content raises."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts(b'd', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines_with_ghosts, version + b"2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")
    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines(b'noeol', [], [b'line'])
        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines(b'base', [], [b'line'])
        vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
    def test_make_mpdiffs(self):
        from breezy import multiparent

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            raise TestSkipped("add_lines_with_ghosts is optional")
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, [b'ghost'])
    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version(b'base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        f.add_lines(b'base', [], [b'line\n'])
        f.add_lines(b'noeol', [b'base'], [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
        # add eol following a no-eol parent, changing content
        f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
        # add eol following a no-eol parent, without changing content
        f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
        # noeol with no parents:
        f.add_lines(b'noeolbase', [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines(b'eolbeforefirstparent', [
            b'noeolbase', b'noeol'], [b'line'])
        # two identical eol texts
        f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
        next_parent = b'base'
        text_name = b'chain1-'
        text = []
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = b'base'
        text_name = b'chain2-'
        text = []
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
        f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry([b'rM'])
        # there are some possibilities:
        # so we check indexes
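        # (any topological sort of the ancestry is acceptable, so we assert
        # relative positions rather than one exact list.)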
        r0 = versions.index(b'r0')
        r1 = versions.index(b'r1')
        r2 = versions.index(b'r2')
        self.assertFalse(b'r3' in versions)
        rM = versions.index(b'rM')

        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
                          f.get_ancestry, [b'rM', b'rX'])

        self.assertEqual(set(f.get_ancestry(b'rM')),
                         set(f.get_ancestry(b'rM', topo_sorted=False)))
    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, b'', [], [])
    def test_copy_to(self):
        f = self.get_file()
        f.add_lines(b'0', [], [b'a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r0': ()}, f.get_parent_map([b'r0']))
        f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
            f.get_parent_map([b'r0', b'r1']))
        f.add_lines(b'r2', [], [b'a\n', b'b\n'])
        f.add_lines(b'r3', [], [b'a\n', b'b\n'])
        f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
        self.assertEqual({}, f.get_parent_map(b'y'))
            f.get_parent_map([b'r0', b'y', b'r1']))
    def test_annotate(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
        origins = f.annotate(b'r1')
        self.assertEqual(origins[0][0], b'r1')
        self.assertEqual(origins[1][0], b'r0')

        self.assertRaises(RevisionNotPresent,
    def test_detection(self):
        # Test weaves detect corruption.

        vf = self.get_file()
        # add a base to get included
        vf.add_lines(b'base', [], [b'base\n'])
        # add an ancestor to be included on one side
        vf.add_lines(b'lancestor', [], [b'lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines(b'rancestor', [b'base'], [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines(b'child', [b'rancestor'], [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines(b'otherchild',
                     [b'lancestor', b'base'],
                     [b'base\n', b'lancestor\n', b'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                                                                   pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions([b'child', b'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', b'child')] > 0)
        self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
        # we don't care if we got more than that.
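        # (each key in lines is a (line, version-that-introduced-it) pair,
        # mapped to how many times the iterator yielded it.)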
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                              vf.get_ancestry_with_ghosts, [b'foo'])
            self.assertRaises(NotImplementedError,
                              vf.get_parents_with_ghosts, b'foo')
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry([b'notbxbfse']))
        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
                         vf.get_parent_map([b'notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts(b'base', [], [b'line\n', b'line_b\n'])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        vf.add_lines_with_ghosts(b'references_ghost',
                                 [b'base', b'a_ghost'],
                                 [b'line\n', b'line_b\n', b'line_c\n'])
        origins = vf.annotate(b'references_ghost')
        self.assertEqual((b'base', b'line\n'), origins[0])
        self.assertEqual((b'base', b'line_b\n'), origins[1])
        self.assertEqual((b'references_ghost', b'line_c\n'), origins[2])
    def test_readonly_mode(self):
        t = self.get_transport()
        factory = self.get_factory()
        vf = factory('id', t, 0o777, create=True, access_mode='w')
        vf = factory('id', t, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, b'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
        w = WeaveFile('foo', self.get_transport(),
                      get_scope=self.get_transaction)
        w.add_lines(b'v1', [], [b'hello\n'])
        w.add_lines(b'v2', [b'v1'], [b'hello\n', b'there\n'])
        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([(b'{', 0), b'hello\n', (b'}', None), (b'{', 1), b'there\n', (b'}', None)
                          ], w._weave)
        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f', b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                          ], w._sha1s)
        w._weave[4] = b'There\n'
    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        w._weave[4] = b'there\n'
        self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))
        # Invalid checksum, first digit changed
        w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
    def reopen_file(self, name='foo', create=False):

        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])
    def test_add_lines(self):
        self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), [], None)
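        # (the plan-merge versioned file only accepts keys whose revision part
        # ends in ':', and requires real lists, not None, for both parents and
        # lines.)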
    def setup_abcde(self):
        self.vf1.add_lines((b'root', b'A'), [], [b'a'])
        self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
        self.vf2.add_lines((b'root', b'C'), [], [b'c'])
        self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
        self.plan_merge_vf.add_lines((b'root', b'E:'),
                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])
    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
        self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
        self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
        self.assertEqual({},
                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
        self.assertEqual({
            (b'root', b'B'): ((b'root', b'A'),),
            (b'root', b'D'): ((b'root', b'C'),),
            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))
    def test_get_record_stream(self):
        self.setup_abcde()

        def get_record(suffix):
            return next(self.plan_merge_vf.get_record_stream(
                [(b'root', suffix)], 'unordered', True))
        self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
        self.assertEqual(b'a', b''.join(get_record(b'A').iter_bytes_as('chunked')))
        self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
        self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record(b'F').storage_kind)
        mp = list(map(addcrlf, mp))
        self.assertEqual(mt.readlines(), mp)
    def testOneInsert(self):

    def testSeparateInserts(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
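        # (doMerge, defined earlier in this class, takes the base text, the
        # two sides, and the expected merge output, each as a list of lines.)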
    def testSameInsert(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
    overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']
    def testOverlappedInsert(self):
        self.doMerge([b'aaa', b'bbb'],
                     [b'aaa', b'xxx', b'yyy', b'bbb'],
                     [b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # [b'aaa', b'xxx', b'yyy', b'bbb']
    def testClashReplace(self):
        self.doMerge([b'aaa'],
                     [b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
1009
def testNonClashInsert1(self):
1022
self.doMerge([b'aaa'],
1025
[b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',
1010
self.doMerge(['aaa'],
1013
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
    def testNonClashInsert2(self):
        self.doMerge([b'aaa'],
    def testDeleteAndModify(self):
        """Clashing delete and modification.

        # skipped, not working yet
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'ddd', b'ccc'],
                     [b'<<<<<<<< ', b'aaa', b'=======', b'>>>>>>> ', b'ccc'])
    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines(b'text0', [], base.splitlines(True))
        w.add_lines(b'text1', [b'text0'], a.splitlines(True))
        w.add_lines(b'text2', [b'text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge(b'text1', b'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = b''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)
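        # (plan_merge yields (state, line) pairs describing what happened to
        # each line; weave_merge consumes such a plan and emits the merged
        # text, inserting conflict markers where the plan clashes.)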
    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = b''.join(self.get_file().weave_merge([('new-a', b'hello\n')]))
        self.assertEqual(result, b'hello\n')
    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map((b'foo@ar',)))
        self.assertEqual("inventory", mapper.map((b'quux',)))
    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("new-id"))
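        # (a mapper turns a key tuple into the relative path used to store it,
        # and unmap inverts that; PrefixMapper keys on the first element only.)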
    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual(
            "9b/file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))
    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
                                                           b"revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
                                                          b"revision-id")))
        self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))


class TestVersionedFiles(TestCaseWithMemoryTransport):
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key(b'a')
        g.add_lines(key_a, [], [b'\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(
            key_a in f.without_fallbacks().get_parent_map([key_a]))
    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key(b'r0')
        key1 = self.get_simple_key(b'r1')
        key2 = self.get_simple_key(b'r2')
        keyf = self.get_simple_key(b'foo')
        f.add_lines(key0, [], [b'a\n', b'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], [b'b\n', b'c\n'])
        else:
            f.add_lines(key1, [], [b'b\n', b'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
    def test_add_chunks(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key(b'r0')
        key1 = self.get_simple_key(b'r1')
        key2 = self.get_simple_key(b'r2')
        keyf = self.get_simple_key(b'foo')

        def add_chunks(key, parents, chunks):
            factory = ChunkedContentFactory(
                key, parents, osutils.sha_strings(chunks), chunks)
            return f.add_content(factory)

        add_chunks(key0, [], [b'a', b'\nb\n'])
        if self.graph:
            add_chunks(key1, [key0], [b'b', b'\n', b'c\n'])
        else:
            add_chunks(key1, [], [b'b\n', b'c\n'])
        keys = f.keys()
        self.assertIn(key0, keys)
        self.assertIn(key1, keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
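        # (add_content accepts any content factory; ChunkedContentFactory
        # carries its text as arbitrary byte chunks, so chunk boundaries need
        # not line up with line boundaries.)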
    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = (b'FileA',)
        # introduced full text
        origins = files.annotate(prefix + (b'origin',))
        self.assertEqual([
            (prefix + (b'origin',), b'origin\n')],
            origins)
        origins = files.annotate(prefix + (b'base',))
        self.assertEqual([
            (prefix + (b'base',), b'base\n')],
            origins)
        origins = files.annotate(prefix + (b'merged',))
        if self.graph:
            self.assertEqual([
                (prefix + (b'base',), b'base\n'),
                (prefix + (b'left',), b'left\n'),
                (prefix + (b'right',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + (b'merged',), b'base\n'),
                (prefix + (b'merged',), b'left\n'),
                (prefix + (b'merged',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
                          files.annotate, prefix + (b'missing-key',))
    def test_check_no_parameters(self):
        files = self.get_versionedfiles()
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
    def test_add_lines_no_key_generates_chk_key(self):
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual({
                (b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                (b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                (b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                (b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                (b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                },
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual({
                (b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                (b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                (b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                (b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                (b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                (b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                (b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                (b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                (b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                (b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                },
                files.keys())
    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key(b'a')
        key_b = self.get_simple_key(b'b')
        key_c = self.get_simple_key(b'c')
        f.add_lines(key_a, [], [b'\n'])
        f.add_lines(key_b, [key_a], [b'\n'])
        f.add_lines(key_c, [key_a, key_b], [b'\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
                          ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
                           'knit-ft', 'knit-delta', 'chunked', 'fulltext',
                           'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
                           'knit-delta-gz',
                           'knit-delta-closure', 'knit-delta-closure-ref',
                           'groupcompress-block', 'groupcompress-block-ref'])
    def capture_stream(self, f, entries, on_seen, parents,
                       require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                                 factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                                  bytes)
            if require_fulltext:
                factory.get_bytes_as('fulltext')
    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 2, (b'left',): 1,
                          (b'right',): 1, (b'base',): 0}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 0,
                (b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
                (b'FileB', b'base'): 0,
                }
        return keys, sort_order
    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 0, (b'left',): 1,
                          (b'right',): 1, (b'base',): 2}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 0, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 2,
                (b'FileB', b'merged'): 3, (b'FileB', b'left'): 4, (b'FileB', b'right'): 4,
                (b'FileB', b'base'): 5,
                }
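        # (note the contrast with get_keys_and_sort_order above: groupcompress
        # ordering wants newest-first within each prefix, so 'merged' sorts
        # before 'base', and the prefixes are kept apart.)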
        return keys, sort_order
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                              factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                                  bytes)
        self.assertEqual(set(keys), seen)
    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',),
                    (b'absent',), (b'base',)]
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'absent'), (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'absent'), (b'FileB', b'base'),
                (b'absent', b'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertRaises(RevisionNotPresent,
                          files.get_annotator().annotate, self.get_simple_key(b'missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                ((b'r0',), self.get_parents(())),
                ((b'r1',), self.get_parents(((b'r0',),))),
                ((b'r2',), self.get_parents(())),
                ((b'r3',), self.get_parents(())),
                ((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
                ]
        else:
            parent_details = [
                ((b'FileA', b'r0'), self.get_parents(())),
                ((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
                ((b'FileA', b'r2'), self.get_parents(())),
                ((b'FileA', b'r3'), self.get_parents(())),
                ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
                    (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = list(all_parents.keys())
        if self.key_length == 1:
            keys.insert(1, (b'missing',))
        else:
            keys.insert(1, (b'missing', b'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
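    # A minimal mental model of get_parent_map, assuming nothing more than a
    # plain dict of parents (a sketch, not the real implementation):
    #
    #     def get_parent_map(parents, keys):
    #         return {k: parents[k] for k in keys if k in parents}
    #
    # which matches the behaviour asserted above: unknown keys are silently
    # omitted from the result rather than raising.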

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'base',), (b'origin',), (b'left',),
                    (b'merged',), (b'right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                (b'FileA', b'base'), (b'FileB', b'origin'), (b'FileA', b'left'),
                (b'FileA', b'merged'), (b'FileB', b'right'),
                ]
        self.assertEqual({
            keys[0]: b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: b'00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: b'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: b'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
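    # The expected values above are plain SHA-1 digests of each full text.
    # Assuming the diamond texts built by get_diamond_files, any one of them
    # could be recomputed with osutils.sha_strings, e.g. for the merged key:
    #
    #     osutils.sha_strings([b'base\n', b'left\n', b'right\n', b'merged\n'])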

        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [(b'origin',)]
            end_keys = [(b'merged',), (b'left',)]
            start_keys = [(b'right',), (b'base',)]
        else:
            origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
            end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
                        (b'FileB', b'merged',), (b'FileB', b'left',)]
            start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
                          (b'FileB', b'right',), (b'FileB', b'base',)]
        origin_entries = source.get_record_stream(
            origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(
            start_keys, 'topological', False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        files.insert_record_stream(entries)
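        # Illustrative note: chaining the streams in this order hands the
        # end_keys records (b'merged', b'left') to insert_record_stream
        # before the start_keys records (b'right', b'base') they depend on,
        # so the target is exercised with records arriving out of
        # topological order.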

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key(
            b'lancestor'), (), [b'lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key(b'rancestor'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key(b'child'),
                        self.get_parents([self.get_simple_key(b'rancestor')]),
                        [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key(b'otherchild'),
                        self.get_parents([self.get_simple_key(b'lancestor'),
                                          self.get_simple_key(b'base')]),
                        [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                                                                  pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key(b'child'),
             self.get_simple_key(b'otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
                               [('Walking content', 0, 5),
                                ('Walking content', 1, 5),
                                ('Walking content', 2, 5),
                                ('Walking content', 3, 5),
                                ('Walking content', 4, 5),
                                ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
        self.assertTrue(
            lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
        self.assertTrue(
            lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
        files.add_lines(self.get_simple_key(b'noeol'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key(b'noeolsecond'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
                        self.get_parents(
                            [self.get_simple_key(b'noeolsecond')]),
                        [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key(b'eol'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key(b'eolline'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
                        self.get_parents([self.get_simple_key(b'noeolbase'),
                                          self.get_simple_key(b'noeol')]),
                        [b'line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key(b'noeoldup'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
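        # Illustrative: after the two loops above, each b'chainN-%d' text is
        # just (depth + 2) copies of b'line\n', e.g. for depth 0:
        #
        #     [b'line\n', b'line\n']
        #
        # so every revision appends one line to its parent, producing two
        # parallel 26-deep delta chains.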
        target = self.get_versionedfiles('target')

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
                         self.texts.get_sha1s([(b"key",)]))

    def test_get_parent_map(self):
        self._parent_map = {b"G": (b"A", b"B")}
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
                         self.texts.get_parent_map([(b"G",), (b"L",)]))

    def test_get_record_stream(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        self._lines[b"B"] = [b"HEY"]
        self._lines[b"C"] = [b"Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
        self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
                         sorted(list(it)))

class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
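    # A sketch of the ordering rule the three tests above pin down, assuming
    # the decorator sorts on its priority map with unknown keys defaulting
    # below any listed priority (an assumption, not the decorator's code):
    #
    #     sorted(request_keys, key=lambda k: (key_priority.get(k, 0), k))
    #
    # With {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1} this yields
    # [(b'D',), (b'B',), (b'A',), (b'C',)]; with only B and D mapped it
    # yields [(b'A',), (b'C',), (b'D',), (b'B',)], matching the assertions.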