        result = dir.create_repository()
        self.assertEqual(result, 'A bzr repository dir')
        controldir.format_registry.remove('default')
        controldir.format_registry.remove('sample')
        controldir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.format_registry.get_default(),
                              old_format.__class__)

class SampleRepositoryFormat(bzrrepository.RepositoryFormatMetaDir):
    """This format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.
    """

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Sample .bzr repository format."

    def initialize(self, a_controldir, shared=False):
        """Initialize a repository in a BzrDir"""
        t = a_controldir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        return False

    def open(self, a_controldir, _found=False):
        return "opened repository."

class SampleExtraRepositoryFormat(repository.RepositoryFormat):
    """A sample format that can not be used in a metadir
    """

    def get_format_string(self):
        raise NotImplementedError
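
# 'Extra' formats such as SampleExtraRepositoryFormat cannot live in a metadir,
# so they carry no on-disk format string (hence the NotImplementedError above);
# they are registered through register_extra()/register_extra_lazy(), which the
# TestRepositoryFormatRegistry tests below exercise.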

class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingcontroldir.initialize(url)
            format.initialize(dir)
            t = transport.get_transport_from_path(url)
            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
            self.assertIsInstance(found_format, format.__class__)
        check_format(repository.format_registry.get_default(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          bzrrepository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_from_string(self):
        self.assertIsInstance(
            SampleRepositoryFormat.from_string(
                b"Sample .bzr repository format."),
            SampleRepositoryFormat)
        self.assertRaises(AssertionError,
                          SampleRepositoryFormat.from_string,
                          b"Different .bzr repository format.")

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          bzrrepository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_find_format_with_features(self):
        tree = self.make_branch_and_tree('.', format='2a')
        tree.branch.repository.update_feature_flags({b"name": b"necessity"})
        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(tree.controldir)
        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
        self.assertEqual(found_format.features.get(b"name"), b"necessity")
        self.assertRaises(bzrdir.MissingFeature, found_format.check_support_status,
                          True)
        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
                        b"name")
        bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
        found_format.check_support_status(True)
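    # A feature flag recorded in the format file must be registered with the
    # format class before check_support_status() will accept it; an
    # unregistered feature raises MissingFeature, as asserted above.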

class TestRepositoryFormatRegistry(TestCase):

    def setUp(self):
        super(TestRepositoryFormatRegistry, self).setUp()
        self.registry = repository.RepositoryFormatRegistry()

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        self.registry.register(format)
        self.assertEqual(format, self.registry.get(b"Sample .bzr repository format."))
        self.registry.remove(format)
        self.assertRaises(KeyError, self.registry.get, b"Sample .bzr repository format.")

    def test_get_all(self):
        format = SampleRepositoryFormat()
        self.assertEqual([], self.registry._get_all())
        self.registry.register(format)
        self.assertEqual([format], self.registry._get_all())

    def test_register_extra(self):
        format = SampleExtraRepositoryFormat()
        self.assertEqual([], self.registry._get_all())
        self.registry.register_extra(format)
        self.assertEqual([format], self.registry._get_all())

    def test_register_extra_lazy(self):
        self.assertEqual([], self.registry._get_all())
        self.registry.register_extra_lazy("breezy.tests.test_repository",
                                          "SampleExtraRepositoryFormat")
        formats = self.registry._get_all()
        self.assertEqual(1, len(formats))
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)

class TestFormat6(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_no_ancestry_weave(self):
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        # We no longer need to create the ancestry.weave file
        # since it is *never* used.
        self.assertRaises(NoSuchFile,
                          control.transport.get,
                          'ancestry.weave')

    def test_supports_external_lookups(self):
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)

class TestFormat7(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())
        # Creating a file with id Foo:Bar results in a non-escaped file name on
        # disk.
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Foo:Bar'], ['file'])
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
        try:
            tree.commit('first post', rev_id='first')
        except errors.IllegalPath:
            if sys.platform != 'win32':
                raise
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
                              ' in repo format 7')
        self.assertEqualDiff(
            '# bzr weave file v5\n'
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
            t.get('weaves/74/Foo%3ABar.weave').read())

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        # lock is not present when unlocked
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())
        self.assertFalse(t.has('branch-lock'))

    def test_creates_lockdir(self):
        """Make sure it appears to be controlled by a LockDir existence"""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        # TODO: Should check there is a 'lock' toplevel directory,
        # regardless of contents
        self.assertFalse(t.has('lock/held/info'))
        repo.lock_write()
        self.assertTrue(t.has('lock/held/info'))
        # unlock so we don't get a warning about failing to do so
        repo.unlock()

    def test_uses_lockdir(self):
        """repo format 7 actually locks on lockdir"""
        base_url = self.get_url()
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        # make sure the same lock is created by opening it
        repo = repository.Repository.open(base_url)
        repo.lock_write()
        self.assertTrue(t.has('lock/held/info'))
        repo.unlock()
        self.assertFalse(t.has('lock/held/info'))

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        ## self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())

    def test_supports_external_lookups(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)

class TestFormatKnit1(TestCaseWithTransport):

        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

class TestInterWeaveRepo(TestCaseWithTransport):

    def test_is_compatible_and_registered(self):
        # InterWeaveRepo is compatible when either side
        # is a format 5/6/7 branch
        from bzrlib.repofmt import knitrepo, weaverepo
        formats = [weaverepo.RepositoryFormat5(),
                   weaverepo.RepositoryFormat6(),
                   weaverepo.RepositoryFormat7()]
        incompatible_formats = [weaverepo.RepositoryFormat4(),
                                knitrepo.RepositoryFormatKnit1(),
                                ]
        repo_a = self.make_repository('a')
        repo_b = self.make_repository('b')
        is_compatible = repository.InterWeaveRepo.is_compatible
        for source in incompatible_formats:
            # force incompatible left then right
            repo_a._format = source
            repo_b._format = formats[0]
            self.assertFalse(is_compatible(repo_a, repo_b))
            self.assertFalse(is_compatible(repo_b, repo_a))
        for source in formats:
            repo_a._format = source
            for target in formats:
                repo_b._format = target
                self.assertTrue(is_compatible(repo_a, repo_b))
        self.assertEqual(repository.InterWeaveRepo,
                         repository.InterRepository.get(repo_a,
                                                        repo_b).__class__)

class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return b"Test Format 1"


class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return b"Test Format 2"

class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
                        source_format)
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
                        target_format)
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        with breezy.ui.ui_factory.nested_progress_bar() as pb:
            converter.convert(repo, pb)
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))

class TestMisc(TestCase):

    def test_unescape_xml(self):
        """We get some kind of error when malformed entities are passed"""
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')

class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):

class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], [b'root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.controldir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
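    # Both the freshly created repository and the re-opened one should hand
    # chk_bytes lookups to the groupcompress-aware leaf parser
    # (btree_index._gcchk_factory) rather than the generic btree leaf factory.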

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', '')),
            ('add', ('file', b'file-id', 'file', b'content\n'))],
            revision_id=b'1')
        builder.build_snapshot([b'1'], [
            ('modify', ('file', b'content-2\n'))],
            revision_id=b'2')
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [(b'file-id', b'1',), (b'file-id', b'2',)])
        file_1_details = details[(b'file-id', b'1')]
        file_2_details = details[(b'file-id', b'2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
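    # Fetching into a 2a repository should recombine both text versions into a
    # single groupcompress group, so the first three fields of each version's
    # index memo (describing where the data lives on disk) are expected to be
    # identical.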

        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)
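        # (Because the rich-root-pack target is not a 2a/CHK format, the
        # generic vf_repository.StreamSource is expected here rather than the
        # CHK-specific subclass.)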

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                                                  format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', b'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname.encode('utf-8') + b'-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot([b'rev-1'], [
            ('modify', ('aa', b'new content for aa-id\n')),
            ('modify', ('cc', b'new content for cc-id\n')),
            ('modify', ('zz', b'new content for zz-id\n')),
            ], revision_id=b'rev-2')
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()

        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot(None, [], revision_id='revid1')
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot(None, [], revision_id='revid1')
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot(None, [], revision_id='revid1')
        self.builder.build_snapshot(['revid1', 'ghost'], [], revision_id='revid2')
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot(None, [], revision_id='revid1')
        self.builder.build_snapshot(['revid1'], [], revision_id='revid2a')
        self.builder.build_snapshot(['revid1'], [], revision_id='revid2b')
        self.builder.build_snapshot(['revid2a', 'revid2b'], [],
                                    revision_id='revid3')
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)

        self.assertTrue(new_pack.signature_index._optimize_for_size)

class TestGCCHKPacker(TestCaseWithTransport):

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ], revision_id='A')
        builder.build_snapshot(['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))],
            revision_id='B')
        builder.build_snapshot(['B'], [
            ('modify', ('file', 'new content\n'))],
            revision_id='C')
        builder.finish_series()
        return builder.get_branch()
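    # The helper above builds three revisions: 'A' adds the root and 'file',
    # 'B' adds 'dir', and 'C' modifies 'file'; the stacking helper below
    # relies on those revision ids.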

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revision's Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.controldir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.controldir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.controldir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
                              r"We are missing inventories for revisions: .*'A'")

class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):