# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
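# A rough sketch of the shape such a test could take, reusing the
# checksum machinery exercised further down in this module (how to
# corrupt the stored bytes in a "typical" way is the open question):
#
#   w = self.get_file()
#   w.add_lines('v1', [], ['hello\n'])
#   # ... overwrite a byte of the stored text on the transport ...
#   self.assertRaises(WeaveInvalidChecksum, w.check)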
from gzip import GzipFile
graph as _mod_graph,
from ..errors import (
RevisionAlreadyPresent,
from ..bzr.knit import (
from ..sixish import (
TestCaseWithMemoryTransport,
from .http_utils import TestCaseWithWebserver
from ..transport.memory import MemoryTransport
from ..bzr import versionedfile as versionedfile
from ..bzr.versionedfile import (
HashEscapedPrefixMapper,
VirtualVersionedFiles,
make_versioned_files_factory,
from ..bzr.weave import (
from ..bzr.weavefile import write_weave
from .scenarios import load_tests_apply_scenarios
load_tests = load_tests_apply_scenarios
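# load_tests_apply_scenarios multiplies every test class that defines a
# `scenarios` attribute (see TestVersionedFiles below): each
# (name, parameters) pair produces one copy of every test, with the
# parameter dict applied as instance attributes such as `cleanup`,
# `factory` and `support_partial_insertion`.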
def get_diamond_vf(f, trailing_eol=True, left_only=False):
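# The ancestry built by the diamond helpers in this module is the
# classic diamond: 'origin' -> 'base', which forks into 'left' and
# 'right', which are merged again in 'merged'.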
self.assertTrue('r0' in versions)
self.assertTrue('r1' in versions)
self.assertTrue('r2' in versions)
self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
self.assertEqual(3, f.num_versions())
origins = f.annotate('r1')
self.assertEqual(origins[0][0], 'r0')
self.assertEqual(origins[1][0], 'r1')
origins = f.annotate('r2')
self.assertEqual(origins[0][0], 'r1')
self.assertEqual(origins[1][0], 'r2')
f = self.reopen_file()
next_parent = 'base'
text_name = 'chain1-'
text = ['line\n']
sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
8: '779e9a0b28f9f832528d4b21e17e168c67697272',
9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
14: '2c4b1736566b8ca6051e668de68650686a3922f2',
15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
19: '1ebed371807ba5935958ad0884595126e8c4e823',
20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
for depth in range(26):
new_version = text_name + '%s' % depth
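# (hedged sketch of the elided loop body, following the pattern the
# sha1s table above implies: grow the text one line per step and chain
# each version onto its predecessor; the file object name is assumed)
text = text + ['line\n']
f.add_lines(new_version, [next_parent], text)
next_parent = new_version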
w = self.get_file_corrupted_text()
self.assertEqual('hello\n', w.get_text('v1'))
self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
self.assertRaises(WeaveInvalidChecksum, w.check)
w = self.get_file_corrupted_checksum()
self.assertEqual('hello\n', w.get_text('v1'))
self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
self.assertRaises(WeaveInvalidChecksum, w.check)
def get_file_corrupted_text(self):
"""Return a versioned file with corrupt text but valid metadata."""
['base', 'a_ghost'],
['line\n', 'line_b\n', 'line_c\n'])
origins = vf.annotate('references_ghost')
self.assertEqual(('base', 'line\n'), origins[0])
self.assertEqual(('base', 'line_b\n'), origins[1])
self.assertEqual(('references_ghost', 'line_c\n'), origins[2])
def test_readonly_mode(self):
t = self.get_transport()
factory = self.get_factory()
vf = factory('id', t, 0o777, create=True, access_mode='w')
vf = factory('id', t, access_mode='r')
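# The file is created writable (0o777, access_mode='w') and reopened
# read-only; the mutating calls below must raise ReadOnlyError rather
# than touch the store.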
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
self.assertRaises(errors.ReadOnlyError,
vf.add_lines_with_ghosts,
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
def get_file(self, name='foo'):
return WeaveFile(name, self.get_transport(),
get_scope=self.get_transaction)
def get_file_corrupted_text(self):
w = WeaveFile('foo', self.get_transport(),
get_scope=self.get_transaction)
w.add_lines('v1', [], ['hello\n'])
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
self.plan_merge_vf.get_parent_map([('root', 'B')]))
self.assertEqual({('root', 'D'):(('root', 'C'),)},
self.plan_merge_vf.get_parent_map([('root', 'D')]))
self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
self.plan_merge_vf.get_parent_map([('root', 'E:')]))
self.assertEqual({},
self.plan_merge_vf.get_parent_map([('root', 'F')]))
self.assertEqual({
('root', 'B'): (('root', 'A'),),
('root', 'D'): (('root', 'C'),),
('root', 'E:'): (('root', 'B'), ('root', 'D')),
self.plan_merge_vf.get_parent_map(
[('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))
# we should be able to read from http with a versioned file.
vf = self.get_file()
# try an empty file access
readonly_vf = self.get_factory()('foo',
transport.get_transport_from_url(self.get_readonly_url('.')))
self.assertEqual([], readonly_vf.versions())
def test_readonly_http_works_with_feeling(self):
# we should be able to read from http with a versioned file.
# now with feeling.
vf.add_lines('1', [], ['a\n'])
vf.add_lines('2', ['1'], ['b\n', 'a\n'])
readonly_vf = self.get_factory()('foo',
transport.get_transport_from_url(self.get_readonly_url('.')))
self.assertEqual(['1', '2'], vf.versions())
self.assertEqual(['1', '2'], readonly_vf.versions())
for version in readonly_vf.versions():
readonly_vf.get_lines(version)
class MergeCasesMixin(object):
def doMerge(self, base, a, b, mp):
from textwrap import dedent
w = self.get_file()
w.add_lines('text0', [], list(map(addcrlf, base)))
w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
w.add_lines('text2', ['text0'], list(map(addcrlf, b)))
self.log_contents(w)
class TestVersionedFiles(TestCaseWithMemoryTransport):
"""Tests for the multiple-file variant of VersionedFile."""
# We want to be sure of behaviour for:
# weaves prefix layout (weave texts)
# individually named weaves (weave inventories)
# annotated knits - prefix|hash|hash-escape layout, we test the third only
# as it is the most complex mapper.
# individually named knits
# individual no-graph knits in packs (signatures)
# individual graph knits in packs (inventories)
# individual graph nocompression knits in packs (revisions)
# plain text knits in packs (texts)
len_one_scenarios = [
'factory': make_versioned_files_factory(WeaveFile,
ConstantMapper('inventory')),
'support_partial_insertion': False,
'factory': make_file_factory(False, ConstantMapper('revisions')),
'support_partial_insertion': False,
('named-nograph-nodelta-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(False, False, 1),
'support_partial_insertion': False,
('named-graph-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(True, True, 1),
'support_partial_insertion': True,
('named-graph-nodelta-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(True, False, 1),
'support_partial_insertion': False,
('groupcompress-nograph', {
'cleanup': groupcompress.cleanup_pack_group,
'factory': groupcompress.make_pack_factory(False, False, 1),
'support_partial_insertion': False,
len_two_scenarios = [
'factory': make_versioned_files_factory(WeaveFile,
'support_partial_insertion': False,
('annotated-knit-escape', {
'factory': make_file_factory(True, HashEscapedPrefixMapper()),
'support_partial_insertion': False,
('plain-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(True, True, 2),
'support_partial_insertion': True,
'cleanup': groupcompress.cleanup_pack_group,
'factory': groupcompress.make_pack_factory(True, False, 1),
'support_partial_insertion': False,
scenarios = len_one_scenarios + len_two_scenarios
def get_versionedfiles(self, relpath='files'):
transport = self.get_transport(relpath)
if relpath != '.':
return ('FileA',) + (suffix,)
def test_add_fallback_implies_without_fallbacks(self):
f = self.get_versionedfiles('files')
if getattr(f, 'add_fallback_versioned_files', None) is None:
raise TestNotApplicable("%s doesn't support fallbacks"
% (f.__class__.__name__,))
g = self.get_versionedfiles('fallback')
key_a = self.get_simple_key('a')
g.add_lines(key_a, [], ['\n'])
f.add_fallback_versioned_files(g)
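# With g registered as a fallback, key_a resolves through f, while
# without_fallbacks() must expose only f's own (empty) store.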
self.assertTrue(key_a in f.get_parent_map([key_a]))
self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
def test_add_lines(self):
f = self.get_versionedfiles()
key0 = self.get_simple_key('r0')
self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)
def test_annotate(self):
files = self.get_versionedfiles()
self.get_diamond_files(files)
vf.add_lines, new_key, [], lines,
nostore_sha=sha)
self.assertRaises(errors.ExistingContent,
vf.add_lines, new_key, [], lines,
nostore_sha=sha)
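# nostore_sha makes add_lines raise ExistingContent instead of storing
# anything when the text's sha1 already matches, letting callers
# deduplicate without writing.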
# and no new version should have been added.
record = next(vf.get_record_stream([new_key], 'unordered', True))
self.assertEqual('absent', record.storage_kind)
def test_add_lines_nostoresha(self):
self._add_content_nostoresha(add_lines=True)
def test_add_lines_return(self):
files = self.get_versionedfiles()
# save code by using the stock data insertion helper.
(('r1',), self.get_parents((('r0',),))),
(('r2',), self.get_parents(())),
(('r3',), self.get_parents(())),
(('m',), self.get_parents((('r0',), ('r1',), ('r2',), ('r3',)))),
parent_details = [
next_parent = self.get_simple_key('base')
text_name = 'chain1-'
text = ['line\n']
sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
8: '779e9a0b28f9f832528d4b21e17e168c67697272',
9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
14: '2c4b1736566b8ca6051e668de68650686a3922f2',
15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
19: '1ebed371807ba5935958ad0884595126e8c4e823',
20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
for depth in range(26):
new_version = self.get_simple_key(text_name + '%s' % depth)
def test_get_sha1s_nonexistent(self):
self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))
def test_get_sha1s(self):
self._lines["key"] = ["dataline1", "dataline2"]
self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
self.texts.get_sha1s([("key",)]))
def test_get_parent_map(self):
self._parent_map = {"G": ("A", "B")}
self.assertEqual({("G",): (("A",), ("B",))},
self.texts.get_parent_map([("G",), ("L",)]))
def test_get_record_stream(self):
self._lines["A"] = ["FOO", "BAR"]
it = self.texts.get_record_stream([("A",)], "unordered", True)
self.assertEqual("chunked", record.storage_kind)
self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
def test_get_record_stream_absent(self):
it = self.texts.get_record_stream([("A",)], "unordered", True)
self.assertEqual("absent", record.storage_kind)
def test_iter_lines_added_or_present_in_keys(self):
self._lines["A"] = ["FOO", "BAR"]
self._lines["B"] = ["HEY"]
self._lines["C"] = ["Alberta"]
it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
sorted(list(it)))
def get_ordering_vf(self, key_priority):
builder = self.make_branch_builder('test')
builder.start_series()
builder.build_snapshot(None, [
    ('add', ('', 'TREE_ROOT', 'directory', None))],
    revision_id='A')
builder.build_snapshot(['A'], [], revision_id='B')
builder.build_snapshot(['B'], [], revision_id='C')
builder.build_snapshot(['C'], [], revision_id='D')
builder.finish_series()
b = builder.get_branch()
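# The history built is the simple chain A -> B -> C -> D; key_priority
# presumably drives the order in which the ordering decorator under
# test yields these records.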