        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        with t.get('format') as f:
            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
                                 f.read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('foo', b'')
        tree.commit('1st post', rev_id=b'foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
                           b'\nfoo fulltext 0 81 :')
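
    # assertHasKnit reads the raw .kndx index file that the knit store keeps
    # beside each knit; extra_content is the index row expected after the
    # format header.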
    def assertHasKnit(self, t, knit_name, extra_content=b''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):

        the whole inventory. So we grab the one from the expected text. Which
        is valid when the api is not being abused.
        """
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        inv_xml = b'<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
        self.assertEqual(b'test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
                   b'</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
                          b'test-rev-id', [inv_xml])
        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
        self.assertEqual(b'other-rev-id', inv.root.revision)
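
    # 'External lookups' are what stacked repositories rely on; the plain
    # knit format predates stacking, so it must report False here.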
    def test_supports_external_lookups(self):
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)

        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id=b"dull")
        revision_tree = tree.branch.repository.revision_tree(b'dull')
        with revision_tree.lock_read():
            self.assertRaises(
                errors.NoSuchFile, revision_tree.get_file_lines, u'')
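        # A Knit1 repository stores no text for the tree root, so asking for
        # the root's lines raises NoSuchFile; after the conversion to the
        # rich-root Knit3 format below, the same request succeeds.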
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree(b'dull')
        with revision_tree.lock_read():
            revision_tree.get_file_lines(u'')
        tree.commit("Another dull commit", rev_id=b'dull2')
        revision_tree = tree.branch.repository.revision_tree(b'dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', '')),
            ('add', ('file', b'file-id', 'file', b'content\n'))],
            revision_id=b'1')
        builder.build_snapshot([b'1'], [
            ('modify', ('file', b'content-2\n'))],
            revision_id=b'2')
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [(b'file-id', b'1',), (b'file-id', b'2',)])
        file_1_details = details[(b'file-id', b'1')]
        file_2_details = details[(b'file-id', b'2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
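        # In a 2a (groupcompress) repository, fetch is expected to place both
        # text versions in one compression group, which is why the first
        # three build-details fields of the two versions match.
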
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                                                  format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', b'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname.encode('utf-8') + b'-id'
                content = b'content for %s\n' % (fname.encode('utf-8'),)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot([b'rev-1'], [
            ('modify', ('aa', b'new content for aa-id\n')),
            ('modify', ('cc', b'new content for cc-id\n')),
            ('modify', ('zz', b'new content for zz-id\n')),
            ], revision_id=b'rev-2')
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_branch.repository._get_source(target._format)
        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
                                        {b'rev-2'})
        simple_chk_records = set()
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.add(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
                         set(simple_chk_records))
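        # A plain stream may assume the target already has rev-1's chk pages;
        # a stream for missing keys cannot, so it has to send every chk page
        # reachable from the requested inventory.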
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', b'rev-2')]
        full_chk_records = set()
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual((b'rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.add(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256

        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)
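
    # When source and target pack formats match exactly, fetch can stream
    # pack data as-is, so _get_source is expected to return the specialised
    # KnitPackStreamSource rather than the generic stream source.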
    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')

        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot(
            None,
            [('add', ('', b'tree-root', 'directory', None))],
            revision_id=b'initial')
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)
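
    # _find_parent_ids_of_revisions yields the ids of revisions that are
    # parents of the given set but not in the set themselves, i.e. the
    # boundary a fetch would have to stop at.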
    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(
            sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
        rev_set = [b'revid2']
        self.assertParentIds([b'revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
        rev_set = [b'revid3', b'revid2']
        self.assertParentIds([b'revid1'], rev_set)

    def test_not_null(self):
        rev_set = [b'initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        rev_set = [b'ghost', b'revid1']
        self.assertParentIds([b'initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot(
            [b'revid1', b'ghost'], [], revision_id=b'revid2')
        rev_set = [b'revid2', b'revid1']
        self.assertParentIds([b'ghost', b'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
                                    revision_id=b'revid3')
        rev_set = [b'revid3', b'revid2a']
        self.assertParentIds([b'revid1', b'revid2b'], rev_set)


class TestWithBrokenRepo(TestCaseWithTransport):

            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id=b'rev1a')
            inv.root.revision = b'rev1a'
            self.add_file(repo, inv, 'file1', b'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
            repo.add_inventory(b'rev1a', inv, [])
            revision = _mod_revision.Revision(
                b'rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision(b'rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id=b'rev1b')
            inv.root.revision = b'rev1b'
            self.add_file(repo, inv, 'file1', b'rev1b', [])
            repo.add_inventory(b'rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
            self.add_file(repo, inv, 'file2', b'rev2', [])
            self.add_revision(repo, b'rev2', inv, [b'rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', b'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(
            revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename.encode('utf-8') + b'-id'
        content = [b'line\n']
        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
        entry.revision = revision
        entry.text_sha1 = osutils.sha_strings(content)
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, content)
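
    # These helpers write low-level text, inventory and revision records
    # directly, skipping the usual commit-time consistency checks, so the
    # tests can build a repository that is broken in controlled ways.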
    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
                         packs.pack_distribution(1))
        self.assertEqual([1, 1],
                         packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
                         packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
                         packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
                         packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(9))
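        # pack_distribution(n) describes how n revisions should be spread
        # across packs; below ten revisions, each revision keeps a pack of
        # its own.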

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""

        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0] for x in deleted_nodes]))
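        # After reload_pack_names, _diff_pack_names is expected to report the
        # same delta against the names loaded at start, so the assertions are
        # repeated unchanged.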

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', None)),
            ('add', ('f', b'f-id', 'file', b'content\n'))],
            revision_id=b'A')
        builder.build_snapshot([b'A'],
                               [('modify', ('f', b'new-content\n'))],
                               revision_id=b'B')
        builder.build_snapshot([b'B'],
                               [('modify', ('f', b'third-content\n'))],
                               revision_id=b'C')
        builder.build_snapshot([b'C'],
                               [('modify', ('f', b'fourth-content\n'))],
                               revision_id=b'D')
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', None)),
            ('add', ('file', b'file-id', 'file', b'content\n')),
            ], revision_id=b'A')
        builder.build_snapshot([b'A'], [
            ('add', ('dir', b'dir-id', 'directory', None))],
            revision_id=b'B')
        builder.build_snapshot([b'B'], [
            ('modify', ('file', b'new content\n'))],
            revision_id=b'C')
        builder.finish_series()
        return builder.get_branch()

            pack_name_with_rev_C_content)

        b_source = self.make_abc_branch()
        b_base = b_source.controldir.sprout(
            'base', revision_id=b'A').open_branch()
        b_stacked = b_base.controldir.sprout(
            'stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, b'B')
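        # Fetching 'B' into the stacked branch only copies what the 'base'
        # branch lacks: A's inventory comes across (needed to compute
        # deltas), but A's revision record does not.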
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.controldir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # revision
        self.assertEqual([(b'A',), (b'B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([(b'B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if (b'A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, b'A')
        self.assertEqual([(b'A',), (b'B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, b'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))