        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        with t.get('format') as f:
            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
                                 f.read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('foo', b'')
        tree.commit('1st post', rev_id=b'foo')
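        # The awkward file id b'Nasty-IdC:' is chosen to exercise the knit
        # store's filename escaping: the capitals and the ':' get %-quoted,
        # and the '%' signs are then quoted again for the transport, which
        # is how the doubly-escaped path asserted below comes about.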
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
                           b'\nfoo fulltext 0 81 :')

    def assertHasKnit(self, t, knit_name, extra_content=b''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        the whole inventory. So we grab the one from the expected text, which
        is valid as long as the api is not being abused.
        """
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        inv_xml = b'<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
        self.assertEqual(b'test-rev-id', inv.root.revision)
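        # Serializer format 5 may omit the revision_id attribute, in which
        # case the id handed to _deserialise_inventory is the one applied to
        # the inventory root.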

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
                   b'</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
                          b'test-rev-id', [inv_xml])
        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
        self.assertEqual(b'other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)
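        # Knit repositories predate stacking, so they have no way to consult
        # fallback repositories; external lookups are therefore unsupported.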
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id=b"dull")
        revision_tree = tree.branch.repository.revision_tree(b'dull')
        with revision_tree.lock_read():
            self.assertRaises(
                errors.NoSuchFile, revision_tree.get_file_lines, u'')
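        # Knit1 does not version the tree root, so the root has no text to
        # read; Knit3 is a rich-root format, which is why the same lookup
        # succeeds after the conversion below.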
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree(b'dull')
        with revision_tree.lock_read():
            revision_tree.get_file_lines(u'')
        tree.commit("Another dull commit", rev_id=b'dull2')
        revision_tree = tree.branch.repository.revision_tree(b'dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', '')),
            ('add', ('file', b'file-id', 'file', b'content\n'))],
            revision_id=b'1')
        builder.build_snapshot([b'1'], [
            ('modify', ('file', b'content-2\n'))],
            revision_id=b'2')
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [(b'file-id', b'1',), (b'file-id', b'2',)])
        file_1_details = details[(b'file-id', b'1')]
        file_2_details = details[(b'file-id', b'2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
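        # file_X_details[0] is the index memo; its first three fields
        # (index, offset, length) locate the compressed block a text lives
        # in, so matching prefixes mean both versions landed in a single
        # groupcompress group instead of one group per text.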

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', '')),
            ('add', ('file', b'file-id', 'file', b'content\n'))],
            revision_id=b'1')
        builder.build_snapshot([b'1'], [
            ('modify', ('file', b'content-2\n'))],
            revision_id=b'2')
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [(b'file-id', b'1',), (b'file-id', b'2',)])
        file_1_details = details[(b'file-id', b'1')]
        file_2_details = details[(b'file-id', b'2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', '')),
            ('add', ('file', b'file-id', 'file', b'content\n'))],
            revision_id=b'1')
        builder.build_snapshot([b'1'], [
            ('modify', ('file', b'content-2\n'))],
            revision_id=b'2')
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [(b'file-id', b'1',), (b'file-id', b'2',)])
        file_1_details = details[(b'file-id', b'1')]
        file_2_details = details[(b'file-id', b'2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
                                        {b'rev-2'})
        simple_chk_records = set()
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.add(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
                         set(simple_chk_records))
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', b'rev-2')]
        full_chk_records = set()
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual((b'rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.add(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
# We have 257 records now. This is because we have 1 root page, and 256
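        # get_stream only sends what the search result covers, whereas
        # get_stream_for_missing_keys re-sends an inventory together with
        # every chk page it references, so a stacked repository can be made
        # self-sufficient.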
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)
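        # In all of these cases source and target use the identical pack
        # format; that exact match is what allows _get_source to return the
        # optimized KnitPackStreamSource rather than a generic stream source.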

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot(
            None,
            [('add', ('', b'tree-root', 'directory', None))],
            revision_id=b'initial')
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(
            sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
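        # _find_parent_ids_of_revisions yields the ids of parents of rev_set
        # that are not themselves in rev_set; as the tests below show, ghost
        # parents are reported as well.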

    def test_simple(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
        rev_set = [b'revid2']
        self.assertParentIds([b'revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
        rev_set = [b'revid3', b'revid2']
        self.assertParentIds([b'revid1'], rev_set)

    def test_not_null(self):
        rev_set = [b'initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        rev_set = [b'ghost', b'revid1']
        self.assertParentIds([b'initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot(
            [b'revid1', b'ghost'], [], revision_id=b'revid2')
        rev_set = [b'revid2', b'revid1']
        self.assertParentIds([b'ghost', b'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
                                    revision_id=b'revid3')
        rev_set = [b'revid3', b'revid2a']
        self.assertParentIds([b'revid1', b'revid2b'], rev_set)


class TestWithBrokenRepo(TestCaseWithTransport):
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id=b'rev1a')
            inv.root.revision = b'rev1a'
            self.add_file(repo, inv, 'file1', b'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
            repo.add_inventory(b'rev1a', inv, [])
            revision = _mod_revision.Revision(
                b'rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision(b'rev1a', revision, inv)
913
# make rev1b, which has no Revision, but has an Inventory, and
930
inv = inventory.Inventory(revision_id=b'rev1b')
931
inv.root.revision = b'rev1b'
932
self.add_file(repo, inv, 'file1', b'rev1b', [])
933
repo.add_inventory(b'rev1b', inv, [])
915
inv = inventory.Inventory(revision_id='rev1b')
916
inv.root.revision = 'rev1b'
917
self.add_file(repo, inv, 'file1', 'rev1b', [])
918
repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
            self.add_file(repo, inv, 'file2', b'rev2', [])
            self.add_revision(repo, b'rev2', inv, [b'rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', b'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
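            # At this point the repository is deliberately inconsistent:
            # rev1b has an inventory and texts but no revision, rev1c is
            # referenced but never added (a ghost), and rev2/rev3 claim
            # per-file ancestors their revision graph does not justify.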
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(
            revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename.encode('utf-8') + b'-id'
        content = [b'line\n']
        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
        entry.revision = revision
        entry.text_sha1 = osutils.sha_strings(content)
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, content)
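        # text_size stays 0 even though the stored text is b'line\n'; these
        # fixtures only need the entry to exist, not to be internally
        # consistent.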

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
        res = packs._clear_obsolete_packs(preserve={'a-pack'})
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))
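        # _clear_obsolete_packs deletes everything under obsolete_packs
        # except the packs named in preserve, and returns the names of the
        # packs it found, which is why 'another-pack' shows up in res even
        # though its file was removed.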

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
                         packs.pack_distribution(1))
        self.assertEqual([1, 1],
                         packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
                         packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
                         packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
                         packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(9))
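        # Below ten revisions the distribution is one single-revision pack
        # per revision; grouping into larger packs only starts once the
        # count reaches ten.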

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0] for x in deleted_nodes]))
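        # The same assertions hold before and after reload_pack_names():
        # _diff_pack_names reports the nodes added to and removed from the
        # pack-names index (keyed by pack name), and a reload must not
        # change that picture.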

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', None)),
            ('add', ('f', b'f-id', 'file', b'content\n'))],
            revision_id=b'A')
        builder.build_snapshot([b'A'],
            [('modify', ('f', b'new-content\n'))],
            revision_id=b'B')
        builder.build_snapshot([b'B'],
            [('modify', ('f', b'third-content\n'))],
            revision_id=b'C')
        builder.build_snapshot([b'C'],
            [('modify', ('f', b'fourth-content\n'))],
            revision_id=b'D')
        b = builder.get_branch()
        builder.finish_series()
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', None)),
            ('add', ('file', b'file-id', 'file', b'content\n')),
            ], revision_id=b'A')
        builder.build_snapshot([b'A'], [
            ('add', ('dir', b'dir-id', 'directory', None))],
            revision_id=b'B')
        builder.build_snapshot([b'B'], [
            ('modify', ('file', b'new content\n'))],
            revision_id=b'C')
        builder.finish_series()
        return builder.get_branch()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # revision
        self.assertEqual([(b'A',), (b'B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([(b'B',)],
                         sorted(repo_not_stacked.revisions.keys()))
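        # A stacked fetch copies the parent inventory (A) so deltas against
        # it can be computed locally, but A's revision itself stays only in
        # the fallback repository.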
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if (b'A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, b'A')
        self.assertEqual([(b'A',), (b'B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, b'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))